[ 565.871018] env[62476]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62476) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.871495] env[62476]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62476) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.871574] env[62476]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62476) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.871909] env[62476]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 565.969494] env[62476]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62476) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 565.980334] env[62476]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62476) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 566.126962] env[62476]: INFO nova.virt.driver [None req-f020d124-f1b5-49cc-aa45-8c5bb4cb096e None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 566.203194] env[62476]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 566.203421] env[62476]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 566.203469] env[62476]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62476) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 569.445138] env[62476]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-f99a3609-e8c4-4884-a686-4cf8e2c6133a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.461757] env[62476]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62476) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 569.461935] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-88f0317b-e701-4b8d-96cc-7efc045f9143 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.490030] env[62476]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 685d5.
[ 569.490222] env[62476]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.287s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.490887] env[62476]: INFO nova.virt.vmwareapi.driver [None req-f020d124-f1b5-49cc-aa45-8c5bb4cb096e None None] VMware vCenter version: 7.0.3
[ 569.494423] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccde9296-df09-4323-bf10-08902bc19cf2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.512398] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f592fb8-2fc6-445c-b4d9-983d230f6ce8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.518911] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4fc12b-1c8d-41f7-a0e4-39c3ba0c0e04 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.525866] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6caf057-79b1-4997-9d6e-2ca57e392547 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.539349] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96255110-7236-4f13-8e18-a1ff715c525b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.546108] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d578bc4-022e-4f7f-ba7a-bf59a052c8ab {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.577363] env[62476]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-7f07b878-67d2-4b4c-95e1-61729a15f6cd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.583147] env[62476]: DEBUG nova.virt.vmwareapi.driver [None req-f020d124-f1b5-49cc-aa45-8c5bb4cb096e None None] Extension org.openstack.compute already exists. {{(pid=62476) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 569.585833] env[62476]: INFO nova.compute.provider_config [None req-f020d124-f1b5-49cc-aa45-8c5bb4cb096e None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 569.603568] env[62476]: DEBUG nova.context [None req-f020d124-f1b5-49cc-aa45-8c5bb4cb096e None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),4935c2d5-9fc4-40f5-ba7f-616de7c3dcc1(cell1) {{(pid=62476) load_cells /opt/stack/nova/nova/context.py:464}}
[ 569.605582] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.605812] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 569.606499] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.606934] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Acquiring lock "4935c2d5-9fc4-40f5-ba7f-616de7c3dcc1" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.607143] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Lock "4935c2d5-9fc4-40f5-ba7f-616de7c3dcc1" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 569.608155] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Lock "4935c2d5-9fc4-40f5-ba7f-616de7c3dcc1" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.629799] env[62476]: INFO dbcounter [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Registered counter for database nova_cell0
[ 569.638832] env[62476]: INFO dbcounter [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Registered counter for database nova_cell1
[ 569.642210] env[62476]: DEBUG oslo_db.sqlalchemy.engines [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62476) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 569.642525] env[62476]: DEBUG oslo_db.sqlalchemy.engines [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62476) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 569.647336] env[62476]: DEBUG dbcounter [-] [62476] Writer thread running {{(pid=62476) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 569.648098] env[62476]: DEBUG dbcounter [-] [62476] Writer thread running {{(pid=62476) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 569.650343] env[62476]: ERROR nova.db.main.api [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 569.650343] env[62476]: result = function(*args, **kwargs)
[ 569.650343] env[62476]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 569.650343] env[62476]: return func(*args, **kwargs)
[ 569.650343] env[62476]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 569.650343] env[62476]: result = fn(*args, **kwargs)
[ 569.650343] env[62476]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 569.650343] env[62476]: return f(*args, **kwargs)
[ 569.650343] env[62476]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 569.650343] env[62476]: return db.service_get_minimum_version(context, binaries)
[ 569.650343] env[62476]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 569.650343] env[62476]: _check_db_access()
[ 569.650343] env[62476]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 569.650343] env[62476]: stacktrace = ''.join(traceback.format_stack())
[ 569.650343] env[62476]:
[ 569.651485] env[62476]: ERROR nova.db.main.api [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 569.651485] env[62476]: result = function(*args, **kwargs)
[ 569.651485] env[62476]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 569.651485] env[62476]: return func(*args, **kwargs)
[ 569.651485] env[62476]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 569.651485] env[62476]: result = fn(*args, **kwargs)
[ 569.651485] env[62476]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 569.651485] env[62476]: return f(*args, **kwargs)
[ 569.651485] env[62476]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 569.651485] env[62476]: return db.service_get_minimum_version(context, binaries)
[ 569.651485] env[62476]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 569.651485] env[62476]: _check_db_access()
[ 569.651485] env[62476]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 569.651485] env[62476]: stacktrace = ''.join(traceback.format_stack())
[ 569.651485] env[62476]:
[ 569.651912] env[62476]: WARNING nova.objects.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Failed to get minimum service version for cell 4935c2d5-9fc4-40f5-ba7f-616de7c3dcc1
[ 569.652037] env[62476]: WARNING nova.objects.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 569.652486] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Acquiring lock "singleton_lock" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 569.652649] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Acquired lock "singleton_lock" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 569.652902] env[62476]: DEBUG oslo_concurrency.lockutils [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Releasing lock "singleton_lock" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 569.653245] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Full set of CONF: {{(pid=62476) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 569.653396] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ******************************************************************************** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 569.653529] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] Configuration options gathered from: {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 569.653669] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 569.653862] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 569.653996] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ================================================================================ {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 569.654225] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] allow_resize_to_same_host = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.654405] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] arq_binding_timeout = 300 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.654539] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] backdoor_port = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.654671] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] backdoor_socket = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.654843] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] block_device_allocate_retries = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.655019] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] block_device_allocate_retries_interval = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.655198] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cert = self.pem {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.655370] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.655594] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute_monitors = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.655779] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] config_dir = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.655960] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] config_drive_format = iso9660 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.656117] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.656286] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] config_source = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.656458] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] console_host = devstack {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.656717] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] control_exchange = nova {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.656806] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cpu_allocation_ratio = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.656961] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] daemon = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.657148] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] debug = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.657313] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] default_access_ip_network_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.657482] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] default_availability_zone = nova {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.657642] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] default_ephemeral_format = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.657804] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] default_green_pool_size = 1000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.658054] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.658229] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] default_schedule_zone = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.658391] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] disk_allocation_ratio = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.658556] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] enable_new_services = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.658756] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] enabled_apis = ['osapi_compute'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.658937] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] enabled_ssl_apis = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.659115] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] flat_injected = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.659282] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] force_config_drive = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.659442] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] force_raw_images = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.659614] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] graceful_shutdown_timeout = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.659803] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] heal_instance_info_cache_interval = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.660052] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] host = cpu-1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.660237] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.660404] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.660567] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.660784] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.660952] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instance_build_timeout = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.661224] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instance_delete_interval = 300 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.661455] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instance_format = [instance: %(uuid)s] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.661644] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instance_name_template = instance-%08x {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.661814] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instance_usage_audit = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.661993] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instance_usage_audit_period = month {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.662188] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.662454] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.662512] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] internal_service_availability_zone = internal {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.662671] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] key = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.662834] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] live_migration_retry_count = 30 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.663015] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_config_append = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.663192] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.663372] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_dir = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.663602] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.663749] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_options = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.663924] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_rotate_interval = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.664117] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_rotate_interval_type = days {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.664297] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] log_rotation_type = none {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.664434] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.664570] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.664752] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.664941] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.665087] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.665275] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] long_rpc_timeout = 1800 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.665450] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] max_concurrent_builds = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.665615] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] max_concurrent_live_migrations = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.665780] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] max_concurrent_snapshots = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.665948] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] max_local_block_devices = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.666140] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] max_logfile_count = 30 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.666311] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] max_logfile_size_mb = 200 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.666475] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] maximum_instance_delete_attempts = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.666657] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] metadata_listen = 0.0.0.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.666838] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] metadata_listen_port = 8775 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.667043] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] metadata_workers = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.667219] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] migrate_max_retries = -1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.667394] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] mkisofs_cmd = genisoimage {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.667611] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.667748] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] my_ip = 10.180.1.21 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.667916] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] network_allocate_retries = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.668113] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.668291] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.668459] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] osapi_compute_listen_port = 8774 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.668631] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] osapi_compute_unique_server_name_scope = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.668832] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] osapi_compute_workers = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.669014] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] password_length = 12 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.669194] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] periodic_enable = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.669356] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] periodic_fuzzy_delay = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.669529] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] pointer_model = usbtablet {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.669700] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] preallocate_images = none {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.669899] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] publish_errors = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.670064] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] pybasedir = /opt/stack/nova {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.670227] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ram_allocation_ratio = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.670396] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] rate_limit_burst = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.670565] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] rate_limit_except_level = CRITICAL {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.670728] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] rate_limit_interval = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.670893] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] reboot_timeout = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.671089] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] reclaim_instance_interval = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.671277] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] record = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.671453] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] reimage_timeout_per_gb = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.671623] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] report_interval = 120 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.671790] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] rescue_timeout = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.671955] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] reserved_host_cpus = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.672132] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] reserved_host_disk_mb = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.672296] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] reserved_host_memory_mb = 512 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.672521] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] reserved_huge_pages = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.672661] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] resize_confirm_window = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.672782] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] resize_fs_using_block_device = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.672945] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] resume_guests_state_on_host_boot = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.673136] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.673299] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] rpc_response_timeout = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.673458] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] run_external_periodic_tasks = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.673631] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] running_deleted_instance_action = reap {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.673795] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.673958] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] running_deleted_instance_timeout = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.674129] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler_instance_sync_interval = 120 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.674301] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_down_time = 720 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.674472] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] servicegroup_driver = db {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.674633] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] shelved_offload_time = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.674792] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] shelved_poll_interval = 3600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.674960] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] shutdown_timeout = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.675165] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] source_is_ipv6 = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.675340] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ssl_only = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.675605] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.675777] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] sync_power_state_interval = 600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.675946] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] sync_power_state_pool_size = 1000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.676130] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] syslog_log_facility = LOG_USER {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.676293] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] tempdir = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.676457] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] timeout_nbd = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.676642] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] transport_url = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.676806] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] update_resources_interval = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.676973] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] use_cow_images = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.677150] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] use_eventlog = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.677314] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] use_journal = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.677476] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] use_json = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.677636] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] use_rootwrap_daemon = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.677798] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] use_stderr = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.677958] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] use_syslog = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.678136] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vcpu_pin_set = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.678315] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plugging_is_fatal = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.678485] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plugging_timeout = 300 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.678654] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] virt_mkfs = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.678851] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] volume_usage_poll_interval = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.679033] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] watch_log_file = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.679215] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] web = /usr/share/spice-html5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 569.679409] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_concurrency.disable_process_locking = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.679724] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.679940] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.680129] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.680336] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.680478] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.680648] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.680835] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.auth_strategy = keystone {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.681019] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.compute_link_prefix = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.681240] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.681422] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.dhcp_domain = novalocal {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.681594] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.enable_instance_password = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.681762] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.glance_link_prefix = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.681933] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.682124] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.682297] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.instance_list_per_project_cells = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.682464] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.list_records_by_skipping_down_cells = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.682636] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.local_metadata_per_cell = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.682794] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.max_limit = 1000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.682963] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.metadata_cache_expiration = 15 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.683159] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.neutron_default_tenant_id = default {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.683330] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.use_neutron_default_nets = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.683512] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.683678] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.683852] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.684038] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.684216] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.vendordata_dynamic_targets = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.684391] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.vendordata_jsonfile_path = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.684576] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.684991] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.backend = dogpile.cache.memcached {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.684991] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.backend_argument = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.685113] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.config_prefix = cache.oslo {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.685292] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.dead_timeout = 60.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.685464] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.debug_cache_backend = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.685629] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.enable_retry_client = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.685795] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.enable_socket_keepalive = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.685969] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.enabled = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.686151] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.enforce_fips_mode = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.686320] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.expiration_time = 600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.686484] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.hashclient_retry_attempts = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.686651] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.686823] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_dead_retry = 300 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.686986] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_password = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.687164] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.687329] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.687493] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_pool_maxsize = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.687657] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.687821] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_sasl_enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.688016] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.688196] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.688360] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.memcache_username = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.688529] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.proxies = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.688691] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.redis_password = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.688898] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.689097] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.689276] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.redis_server = localhost:6379 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.689447] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.redis_socket_timeout = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 569.689612] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.redis_username = None {{(pid=62476) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.689808] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.retry_attempts = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.690020] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.retry_delay = 0.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.690198] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.socket_keepalive_count = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.690370] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.socket_keepalive_idle = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.690539] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.socket_keepalive_interval = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.690704] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.tls_allowed_ciphers = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.690868] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.tls_cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.691052] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.tls_certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.691249] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.tls_enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.691418] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cache.tls_keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.691598] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.691779] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.auth_type = password {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.691949] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.692146] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.692314] env[62476]: DEBUG oslo_service.service 
[None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.692484] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.692651] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.cross_az_attach = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.692825] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.debug = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.692985] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.endpoint_template = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.693163] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.http_retries = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.693331] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.693496] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.693672] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.os_region_name = RegionOne {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.693842] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.694019] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cinder.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.694196] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.694360] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.cpu_dedicated_set = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.694521] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.cpu_shared_set = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.694692] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.image_type_exclude_list = [] {{(pid=62476) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.694859] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.695039] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.695236] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.695380] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.695562] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.695726] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.resource_provider_association_refresh = 300 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.695894] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.696071] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.shutdown_retry_interval = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.696258] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.696440] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] conductor.workers = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.696618] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] console.allowed_origins = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.696785] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] console.ssl_ciphers = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.696959] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] console.ssl_minimum_version = default {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.697144] env[62476]: DEBUG oslo_service.service [None 
req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] consoleauth.enforce_session_timeout = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.697316] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] consoleauth.token_ttl = 600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.697486] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.697646] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.697813] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.697978] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.connect_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.698150] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.connect_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.698310] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.endpoint_override = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.698476] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.698639] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.698838] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.max_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.699018] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.min_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.699181] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.region_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.699343] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.retriable_status_codes = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.699504] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.service_name = None {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.699686] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.service_type = accelerator {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.699869] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.700048] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.status_code_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.700214] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.status_code_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.700394] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.701029] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.701029] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] cyborg.version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.701029] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.backend = sqlalchemy {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704337] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.connection = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704337] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.connection_debug = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704337] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.connection_parameters = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704337] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.connection_recycle_time = 3600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704337] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.connection_trace = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704337] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.db_inc_retry_interval = True {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704495] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.db_max_retries = 20 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704495] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.db_max_retry_interval = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704495] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.db_retry_interval = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704495] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.max_overflow = 50 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704495] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.max_pool_size = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704495] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.max_retries = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.mysql_wsrep_sync_wait = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.pool_timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.retry_interval = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.slave_connection = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.sqlite_synchronous = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704818] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] database.use_db_reconnect = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704818] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.backend = sqlalchemy {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
569.704818] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.connection = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.704902] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.connection_debug = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.705216] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.connection_parameters = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.705216] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.connection_recycle_time = 3600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.705348] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.connection_trace = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.705509] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.db_inc_retry_interval = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.705678] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.db_max_retries = 20 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.705892] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.db_max_retry_interval = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.706132] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.db_retry_interval = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.706300] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.max_overflow = 50 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.706469] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.max_pool_size = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.706634] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.max_retries = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.706811] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.706976] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.707151] 
env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.pool_timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.707318] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.retry_interval = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.707480] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.slave_connection = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.707643] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] api_database.sqlite_synchronous = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.707819] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] devices.enabled_mdev_types = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.708006] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.708190] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.708354] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ephemeral_storage_encryption.enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.708521] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.708691] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.api_servers = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.708891] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.709075] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.709245] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.709406] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.connect_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.709566] env[62476]: DEBUG 
oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.connect_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.709755] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.debug = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.709941] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.default_trusted_certificate_ids = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.710125] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.enable_certificate_validation = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.710295] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.enable_rbd_download = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.710460] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.endpoint_override = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.710629] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.710798] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.710958] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.max_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.711160] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.min_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.711343] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.num_retries = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.711517] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.rbd_ceph_conf = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.711682] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.rbd_connect_timeout = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.711853] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.rbd_pool = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.712034] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.rbd_user = {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.712201] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.region_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.712364] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.retriable_status_codes = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.712525] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.service_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.712696] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.service_type = image {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.712863] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.713037] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.status_code_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.713202] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.status_code_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.713360] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.713544] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.713713] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.verify_glance_signatures = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.713878] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] glance.version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.714058] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] guestfs.debug = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.714240] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] mks.enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.714612] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.714811] 
env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] image_cache.manager_interval = 2400 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.714987] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] image_cache.precache_concurrency = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.715179] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] image_cache.remove_unused_base_images = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.715355] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.715528] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.715712] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] image_cache.subdirectory_name = _base {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.715897] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.api_max_retries = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.716079] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.api_retry_interval = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.716329] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.716526] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.auth_type = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.716697] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.716861] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.717047] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.717225] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.conductor_group = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.717391] env[62476]: DEBUG 
oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.connect_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.717555] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.connect_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.717719] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.endpoint_override = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.717891] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.718065] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.718231] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.max_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.718393] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.min_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.718559] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.peer_list = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.718738] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.region_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.718926] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.retriable_status_codes = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.719111] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.serial_console_state_timeout = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.719280] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.service_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.719455] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.service_type = baremetal {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.719620] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.shard = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.719819] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.split_loggers = False {{(pid=62476) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.720012] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.status_code_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.720193] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.status_code_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.720352] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.720537] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.720704] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ironic.version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.720897] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.721089] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] key_manager.fixed_key = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.721282] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.721451] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.barbican_api_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.721611] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.barbican_endpoint = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.721785] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.barbican_endpoint_type = public {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.721951] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.barbican_region_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.722130] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.722296] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.certfile = None {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.722464] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.722630] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.722794] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.722967] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.number_of_retries = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.723147] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.retry_delay = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.723313] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.send_service_user_token = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.723478] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.723640] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.723806] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.verify_ssl = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.723972] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican.verify_ssl_path = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.724155] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.724324] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.auth_type = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.724487] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.724648] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
569.724814] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.724982] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.725158] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.725325] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.725489] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] barbican_service_user.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.725660] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.approle_role_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.725825] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.approle_secret_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.725988] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.726162] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.726329] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.726491] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.726653] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.726827] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.kv_mountpoint = secret {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.726992] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.kv_path = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.727177] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None 
None] vault.kv_version = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.727342] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.namespace = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.727504] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.root_token_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.727668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.727838] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.ssl_ca_crt_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.728014] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.728187] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.use_ssl = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.728360] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.728531] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.728701] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.auth_type = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.728894] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.729076] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.729250] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.729413] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.connect_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.729574] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.connect_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
569.729765] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.endpoint_override = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.730031] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.730231] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.730402] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.max_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.730566] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.min_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.730730] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.region_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.730896] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.retriable_status_codes = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.731073] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.service_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.731250] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.service_type = identity {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.731419] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.731584] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.status_code_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.731746] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.status_code_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.731910] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.732105] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.732274] env[62476]: DEBUG oslo_service.service [None 
req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] keystone.version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.732482] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.connection_uri = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.732648] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.cpu_mode = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.732817] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.732995] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.cpu_models = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.733203] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.cpu_power_governor_high = performance {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.733390] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.733520] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.cpu_power_management = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.733696] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.733864] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.device_detach_attempts = 8 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.734042] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.device_detach_timeout = 20 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.734215] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.disk_cachemodes = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.734379] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.disk_prefix = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.734546] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.enabled_perf_events = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.734713] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] 
libvirt.file_backed_memory = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.734885] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.gid_maps = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.735053] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.hw_disk_discard = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.735227] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.hw_machine_type = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.735409] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.images_rbd_ceph_conf = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.735577] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.735744] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.735916] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.images_rbd_glance_store_name = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.736100] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.images_rbd_pool = rbd {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.736280] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.images_type = default {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.736446] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.images_volume_group = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.736611] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.inject_key = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.736779] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.inject_partition = -2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.736947] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.inject_password = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.737127] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.iscsi_iface = None {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.737296] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.iser_use_multipath = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.737465] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.737630] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.737795] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_downtime = 500 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.737960] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.738136] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.738301] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_inbound_addr = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.738465] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.738627] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.738833] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_scheme = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.739021] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_timeout_action = abort {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.739199] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_tunnelled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.739415] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.live_migration_uri = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.739533] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.739698] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.max_queues = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.739889] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.740146] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.740317] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.nfs_mount_options = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.740623] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.740802] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.740975] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.741238] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.741547] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.741857] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.num_pcie_ports = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.742179] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.742474] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.pmem_namespaces = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.742678] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.quobyte_client_cfg = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.742991] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.743192] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.743373] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.743546] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.743714] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rbd_secret_uuid = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.743884] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rbd_user = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.744074] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.744262] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.744431] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rescue_image_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.744598] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rescue_kernel_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.744781] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rescue_ramdisk_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.745027] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.745202] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.rx_queue_size = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.745381] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.smbfs_mount_options = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.745668] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.745873] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.snapshot_compression = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.746070] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.snapshot_image_format = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.746305] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.746482] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.sparse_logical_volumes = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.746655] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.swtpm_enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.746834] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.swtpm_group = tss {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.747014] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.swtpm_user = tss {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.747199] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.sysinfo_serial = unique {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.747365] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.tb_cache_size = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.747530] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.tx_queue_size = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.747701] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.uid_maps = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.747870] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.use_virtio_for_bridges = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.748059] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.virt_type = kvm {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.748239] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.volume_clear = zero 
{{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.748409] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.volume_clear_size = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.748578] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.volume_use_multipath = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.748762] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.vzstorage_cache_path = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.748955] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.749147] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.749321] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.749497] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.749834] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.750123] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.vzstorage_mount_user = stack {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.750324] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.750511] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.750694] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.auth_type = password {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.750862] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.751042] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.certfile = None 
{{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.751216] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.751381] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.connect_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.751545] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.connect_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.751720] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.default_floating_pool = public {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.751888] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.endpoint_override = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.752062] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.extension_sync_interval = 600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.752237] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.http_retries = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.752405] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.752567] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.752730] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.max_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.752907] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.753083] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.min_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.753283] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.ovs_bridge = br-int {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.753436] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.physnets = [] {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.753605] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.region_name = RegionOne {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.753860] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.retriable_status_codes = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.754174] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.service_metadata_proxy = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.754468] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.service_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.754679] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.service_type = network {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.754959] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.755097] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.status_code_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.755278] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.status_code_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.755447] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.755637] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.755804] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] neutron.version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.756010] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] notifications.bdms_in_notifications = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.756255] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] notifications.default_level = INFO {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.756447] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] notifications.notification_format = unversioned {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.756620] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] notifications.notify_on_state_change = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.756806] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.756988] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] pci.alias = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.757177] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] pci.device_spec = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.757349] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] pci.report_in_placement = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.757527] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.757706] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.auth_type = password {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.757879] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.758055] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.758224] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.758392] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.758556] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.connect_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.758742] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.connect_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.758913] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.default_domain_id = None {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.759188] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.default_domain_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.759395] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.domain_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.759566] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.domain_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.759755] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.endpoint_override = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.759940] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.760121] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.760286] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.max_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.760448] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.min_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.760619] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.password = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.760784] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.project_domain_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.760954] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.project_domain_name = Default {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.761136] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.project_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.761311] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.project_name = service {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.761486] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.region_name = RegionOne {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.761650] 
env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.retriable_status_codes = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.761814] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.service_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.761991] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.service_type = placement {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.762170] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.762338] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.status_code_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.762501] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.status_code_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.762664] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.system_scope = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.762824] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.762987] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.trust_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.763164] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.user_domain_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.763335] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.user_domain_name = Default {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.763516] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.user_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.763709] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.username = placement {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.763851] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.764028] env[62476]: DEBUG oslo_service.service [None 
req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] placement.version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.764218] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.cores = 20 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.764388] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.count_usage_from_placement = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.764564] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.764746] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.injected_file_content_bytes = 10240 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.764919] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.injected_file_path_length = 255 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.765100] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.injected_files = 5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.765274] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.instances = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.765444] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.key_pairs = 100 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.765615] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.metadata_items = 128 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.765786] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.ram = 51200 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.765958] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.recheck_quota = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.766144] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.server_group_members = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.766316] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] quota.server_groups = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.766490] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.766660] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.766828] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.image_metadata_prefilter = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.767018] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.767205] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.max_attempts = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.767375] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.max_placement_results = 1000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.767544] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.767711] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.767878] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.768067] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] scheduler.workers = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.768253] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.768430] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.768614] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.768819] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.768991] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.769181] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.769347] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.769538] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.769727] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.host_subset_size = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.769910] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.770094] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.770269] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.770438] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.isolated_hosts = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.770607] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.isolated_images = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.770775] env[62476]: DEBUG oslo_service.service [None 
req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.770947] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.771134] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.771303] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.pci_in_placement = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.771507] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.771706] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.771884] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.772073] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.772237] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.772405] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.772571] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.track_instance_changes = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.772753] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.772930] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] metrics.required = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.773112] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] metrics.weight_multiplier = 1.0 
{{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.773282] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.773452] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] metrics.weight_setting = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.773814] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.773957] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] serial_console.enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.774156] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] serial_console.port_range = 10000:20000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.774333] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.774507] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.774681] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] serial_console.serialproxy_port = 6083 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.774854] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.775043] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.auth_type = password {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.775214] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.775377] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.775543] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.775707] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.insecure = False {{(pid=62476) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.775870] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.776056] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.send_service_user_token = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.776229] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.776392] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] service_user.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.776580] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.agent_enabled = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.776747] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.777088] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.777294] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.777471] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.html5proxy_port = 6082 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.777641] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.image_compression = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.777813] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.jpeg_compression = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.777978] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.playback_compression = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.778167] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.server_listen = 127.0.0.1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.778345] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.778509] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.streaming_mode = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.778671] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] spice.zlib_compression = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.778879] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] upgrade_levels.baseapi = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.779076] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] upgrade_levels.compute = auto {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.779248] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] upgrade_levels.conductor = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.779414] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] upgrade_levels.scheduler = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.779585] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.779773] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.779955] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.780133] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.780303] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.780470] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.780632] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.780799] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.780962] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vendordata_dynamic_auth.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.781155] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.api_retry_count = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.781321] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.ca_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.781499] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.781671] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.cluster_name = testcl1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.781841] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.connection_pool_size = 10 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.782015] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.console_delay_seconds = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.782194] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.datastore_regex = ^datastore.* {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.782408] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.782588] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.host_password = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.782762] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.host_port = 443 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.782939] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.host_username = administrator@vsphere.local {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.783126] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.insecure = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.783297] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.integration_bridge = None {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.783466] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.maximum_objects = 100 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.783630] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.pbm_default_policy = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.783802] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.pbm_enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.783974] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.pbm_wsdl_location = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.784447] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.784447] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.serial_port_proxy_uri = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.784548] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.serial_port_service_uri = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.784698] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.task_poll_interval = 0.5 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.784876] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.use_linked_clone = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.785067] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.vnc_keymap = en-us {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.785240] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.vnc_port = 5900 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.785408] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vmware.vnc_port_total = 10000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.785600] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.auth_schemes = ['none'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.785780] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.786151] env[62476]: 
DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.786379] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.786563] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.novncproxy_port = 6080 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.786748] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.server_listen = 127.0.0.1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.786932] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.787112] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.vencrypt_ca_certs = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.787282] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.vencrypt_client_cert = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.787446] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vnc.vencrypt_client_key = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.787632] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.787830] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.disable_deep_image_inspection = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.788034] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.788209] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.788376] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.788584] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.disable_rootwrap = False {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.788789] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.enable_numa_live_migration = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.788965] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.789148] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.789316] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.789480] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.libvirt_disable_apic = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.789645] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.789838] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.790019] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.790190] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.790357] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.790521] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.790685] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.790850] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
569.791024] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.791192] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.791383] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.791557] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.client_socket_timeout = 900 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.791730] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.default_pool_size = 1000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.791903] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.keep_alive = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.792086] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.max_header_line = 16384 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.792303] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.792421] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.ssl_ca_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.792583] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.ssl_cert_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.792745] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.ssl_key_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.792915] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.tcp_keepidle = 600 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.793107] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.793282] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] zvm.ca_file = None {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.793447] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] zvm.cloud_connector_url = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.793748] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.793932] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] zvm.reachable_timeout = 300 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.794130] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.enforce_new_defaults = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.794307] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.enforce_scope = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.794486] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.policy_default_rule = default {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.794669] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.794845] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.policy_file = policy.yaml {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.795031] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.795201] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.795362] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.795522] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.795685] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.795855] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.796046] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.796231] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.connection_string = messaging:// {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.796419] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.enabled = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.796567] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.es_doc_type = notification {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.796733] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.es_scroll_size = 10000 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.796906] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.es_scroll_time = 2m {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.797085] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.filter_error_trace = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.797261] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.hmac_keys = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.797431] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.sentinel_service_name = mymaster {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.797604] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.socket_timeout = 0.1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.797773] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.trace_requests = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.797938] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler.trace_sqlalchemy = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.798131] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler_jaeger.process_tags = {} {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.798300] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.798469] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] profiler_otlp.service_name_prefix = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.798638] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] remote_debug.host = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.798830] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] remote_debug.port = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.799031] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.799205] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.799371] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.799533] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.799707] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.799930] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.800119] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.800288] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.800453] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.800626] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.800789] env[62476]: 
DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.800965] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.801150] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.801323] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.801496] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.801666] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.801831] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.802015] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.802189] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.802354] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.802531] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.802686] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.802851] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.803030] env[62476]: DEBUG oslo_service.service [None 
req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.803200] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.803362] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.803525] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.803686] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.803848] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.804028] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.ssl = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.804198] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.804372] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.804535] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.804708] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.804883] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.805059] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.805258] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.805430] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_notifications.retry = -1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.805612] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.805789] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.805968] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.auth_section = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.806148] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.auth_type = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.806312] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.cafile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.806473] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.certfile = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.806640] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.collect_timing = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.806800] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.connect_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.806961] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.connect_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.807133] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.endpoint_id = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.807293] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.endpoint_override = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.807457] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.insecure = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.807615] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.keyfile = None {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.807775] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.max_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.807972] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.min_version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.808159] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.region_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.808324] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.retriable_status_codes = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.808485] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.service_name = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.808645] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.service_type = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.808847] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.split_loggers = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.809033] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.status_code_retries = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.809203] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.status_code_retry_delay = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.809364] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.timeout = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.809525] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.valid_interfaces = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.809685] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_limit.version = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.809854] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_reports.file_event_handler = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.810070] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62476) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.810245] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] oslo_reports.log_dir = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.810421] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.810585] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.810747] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.810918] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.811126] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.811309] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.811483] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.811644] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_ovs_privileged.group = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.811805] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.811976] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.812157] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.812319] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] vif_plug_ovs_privileged.user = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.812490] env[62476]: DEBUG oslo_service.service 
[None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.812671] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.812848] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.813034] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.813212] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.813381] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.813549] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.813712] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.813896] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.814082] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_ovs.isolate_vif = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.814273] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.814429] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.814603] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.814777] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
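The long block above is oslo.config dumping every registered option, group by group, at service start. A minimal sketch of that mechanism, using group and option names taken from the dump (defaults are copied from the log; the real definitions are owned by os-vif, so treat this as illustrative):

    # Sketch: how the "group.option = value" dump above is produced.
    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)
    CONF = cfg.CONF

    # A few of the [os_vif_ovs] options seen in the log.
    CONF.register_opts([
        cfg.IntOpt('network_device_mtu', default=1500),
        cfg.IntOpt('ovs_vsctl_timeout', default=120),
        cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
        cfg.BoolOpt('per_port_bridge', default=False),
    ], group='os_vif_ovs')

    CONF([], project='sketch')  # parse with defaults only
    # Emits one DEBUG line per option, e.g. "os_vif_ovs.ovs_vsctl_timeout = 120",
    # which is exactly the cfg.py log_opt_values output seen above.
    CONF.log_opt_values(LOG, logging.DEBUG)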
[ 569.814945] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_vif_ovs.per_port_bridge = False {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.815127] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_brick.lock_path = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.815300] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.815464] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.815637] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] privsep_osbrick.capabilities = [21] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.815799] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] privsep_osbrick.group = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.815961] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] privsep_osbrick.helper_command = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.816142] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.816311] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.816472] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] privsep_osbrick.user = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.816647] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.816808] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] nova_sys_admin.group = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.816969] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] nova_sys_admin.helper_command = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.817151] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
569.817318] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.817478] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] nova_sys_admin.user = None {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.817611] env[62476]: DEBUG oslo_service.service [None req-68241ad0-0b94-484e-9ee2-c326b6d3a456 None None] ******************************************************************************** {{(pid=62476) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 569.818422] env[62476]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 569.829028] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Getting list of instances from cluster (obj){ [ 569.829028] env[62476]: value = "domain-c8" [ 569.829028] env[62476]: _type = "ClusterComputeResource" [ 569.829028] env[62476]: } {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 569.830362] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae55f0b9-0e00-4418-8638-d3647398552d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.839865] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Got total of 0 instances {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 569.840467] env[62476]: WARNING nova.virt.vmwareapi.driver [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 569.840951] env[62476]: INFO nova.virt.node [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Generated node identity 0cae7a3c-64e3-4b86-8a81-24d587f58f11 [ 569.841232] env[62476]: INFO nova.virt.node [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Wrote node identity 0cae7a3c-64e3-4b86-8a81-24d587f58f11 to /opt/stack/data/n-cpu-1/compute_id [ 569.853743] env[62476]: WARNING nova.compute.manager [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Compute nodes ['0cae7a3c-64e3-4b86-8a81-24d587f58f11'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 569.890114] env[62476]: INFO nova.compute.manager [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 569.916293] env[62476]: WARNING nova.compute.manager [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
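The node-identity lines above show the service reading back, or generating and persisting, a stable compute node UUID; the ComputeHostNotFound warning is expected on a first start, before any compute node record exists. A read-or-create sketch of that pattern (the helper name and structure are hypothetical; Nova's actual logic lives in nova/virt/node.py):

    import os
    import uuid

    STATE_PATH = '/opt/stack/data/n-cpu-1'  # path taken from the log

    def get_or_create_node_identity(state_path=STATE_PATH):
        # Hypothetical helper: reuse a persisted identity if present,
        # otherwise generate one and write it out, matching the
        # "Wrote node identity ... to .../compute_id" line above.
        ident_file = os.path.join(state_path, 'compute_id')
        if os.path.exists(ident_file):
            with open(ident_file) as f:
                return uuid.UUID(f.read().strip())
        node_id = uuid.uuid4()
        os.makedirs(state_path, exist_ok=True)
        with open(ident_file, 'w') as f:
            f.write(str(node_id))
        return node_id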
[ 569.916549] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.916820] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.917081] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.917249] env[62476]: DEBUG nova.compute.resource_tracker [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 569.921373] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67008f7e-e392-47db-8d00-edd08a9ea525 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.928682] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96531933-0c29-4900-90cb-c9ce57a9ad98 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.944325] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a307a68e-12c5-4bb4-a96b-09aeb8839100 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.951467] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5305775-2eee-4436-a6a9-e02f8d2b8f13 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.982377] env[62476]: DEBUG nova.compute.resource_tracker [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180684MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 569.982544] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.982745] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.997356] env[62476]: WARNING 
nova.compute.resource_tracker [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] No compute node record for cpu-1:0cae7a3c-64e3-4b86-8a81-24d587f58f11: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 0cae7a3c-64e3-4b86-8a81-24d587f58f11 could not be found. [ 570.012286] env[62476]: INFO nova.compute.resource_tracker [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 [ 570.070667] env[62476]: DEBUG nova.compute.resource_tracker [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 570.070856] env[62476]: DEBUG nova.compute.resource_tracker [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=100GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 570.185631] env[62476]: INFO nova.scheduler.client.report [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] [req-fe59fa05-0971-4285-acc4-3fb14cf6d832] Created resource provider record via placement API for resource provider with UUID 0cae7a3c-64e3-4b86-8a81-24d587f58f11 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 570.202557] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a17b523-c45a-4f6e-8e1a-ebea9b634fff {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.210646] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64104312-ac96-479e-9994-fc39846a8dfd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.240051] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa216d15-0514-4cb5-8c70-a9d93a587407 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.247732] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1828a7c2-1610-4a06-8c04-ec4bd101f3ac {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.261604] env[62476]: DEBUG nova.compute.provider_tree [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 570.301850] env[62476]: DEBUG nova.scheduler.client.report [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Updated inventory for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 570.302125] env[62476]: DEBUG nova.compute.provider_tree [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Updating resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 generation from 0 to 1 during operation: update_inventory {{(pid=62476) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 570.302274] env[62476]: DEBUG nova.compute.provider_tree [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 570.353426] env[62476]: DEBUG nova.compute.provider_tree [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Updating resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 generation from 1 to 2 during operation: update_traits {{(pid=62476) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 570.372076] env[62476]: DEBUG nova.compute.resource_tracker [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 570.372284] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.389s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.372444] env[62476]: DEBUG nova.service [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Creating RPC server for service compute {{(pid=62476) start /opt/stack/nova/nova/service.py:182}} [ 570.384731] env[62476]: DEBUG nova.service [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] Join ServiceGroup membership for this service compute {{(pid=62476) start /opt/stack/nova/nova/service.py:199}} [ 570.384922] env[62476]: DEBUG nova.servicegroup.drivers.db [None req-6cbdfe33-cebd-4a11-b8b6-e66352885e95 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62476) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 579.650802] env[62476]: DEBUG dbcounter [-] [62476] Writing DB stats nova_cell1:SELECT=1 {{(pid=62476) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 579.651636] env[62476]: DEBUG dbcounter [-] [62476] Writing DB stats nova_cell0:SELECT=1 {{(pid=62476) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 606.388749] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running 
periodic task ComputeManager._sync_power_states {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 606.402907] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Getting list of instances from cluster (obj){ [ 606.402907] env[62476]: value = "domain-c8" [ 606.402907] env[62476]: _type = "ClusterComputeResource" [ 606.402907] env[62476]: } {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 606.406286] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f095cca7-9552-46de-8190-ab6e38c1a9f8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.423283] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Got total of 0 instances {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 606.423392] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 606.424406] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Getting list of instances from cluster (obj){ [ 606.424406] env[62476]: value = "domain-c8" [ 606.424406] env[62476]: _type = "ClusterComputeResource" [ 606.424406] env[62476]: } {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 606.424699] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b922940-0b40-4f8f-940c-8476e4cc8d7f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.434293] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Got total of 0 instances {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 616.071831] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquiring lock "5f15094d-b066-4025-af5d-4ed35af2dfee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.071831] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Lock "5f15094d-b066-4025-af5d-4ed35af2dfee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.109179] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Starting instance... 
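The "Acquiring lock / Lock acquired ... waited / released ... held" triples throughout this log, including the per-instance build lock just above, come from the inner wrapper in oslo_concurrency.lockutils (the lockutils.py:402/407/421 call sites in the entries). A sketch of the two forms in use, with placeholder bodies:

    from oslo_concurrency import lockutils

    # Decorator form: serialises resource-tracker work under the shared
    # "compute_resources" lock seen repeatedly in this log.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # claim host resources while holding the lock

    # Context-manager form: the build lock is named by the instance UUID,
    # e.g. "5f15094d-b066-4025-af5d-4ed35af2dfee" above.
    def locked_do_build_and_run_instance(instance_uuid):
        with lockutils.lock(instance_uuid):
            pass  # build and run the instance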
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 616.282133] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.282416] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.288072] env[62476]: INFO nova.compute.claims [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.448499] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36d8775-69b4-476b-8084-47d1163133e7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.459201] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa3d058-d211-4e8b-a333-f616bd86393d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.512832] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b13a502-47b1-4980-afc9-3ec2b071ef6f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.522532] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa87fb91-5e6b-42b4-adae-eb5ccd1f447a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.546948] env[62476]: DEBUG nova.compute.provider_tree [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.563182] env[62476]: DEBUG nova.scheduler.client.report [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.587586] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.589712] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 616.630986] env[62476]: DEBUG nova.compute.utils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.633751] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 616.634155] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 616.659824] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 616.770128] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Start spawning the instance on the hypervisor. 
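The inventory dictionary logged above is the payload the resource tracker reports to Placement for this provider. A sketch of its structure and of the standard Placement capacity arithmetic (the payload is copied from the log; the helper is illustrative):

    # Inventory as reported for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Placement treats (total - reserved) * allocation_ratio as the
        # schedulable capacity; max_unit caps any single allocation.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 200.0}

Against that capacity, an m1.nano claim (1 vCPU, 128 MB RAM, 1 GB disk, per the flavor dumped below) is tiny, which is why every "Claim successful" in this log goes through without contention.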
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 616.885597] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 616.888929] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 616.888929] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.888929] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 616.888929] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.888929] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 616.889241] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 616.889241] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a 
tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 616.889241] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 616.889241] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 616.889373] env[62476]: DEBUG nova.virt.hardware [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 616.893240] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f814e66c-f14c-4502-8a63-08d9ca195eca {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.908752] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a905a591-cd31-4be2-b3b8-b50d38c16ea4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.937052] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7727dd-c1a7-4a2d-b4af-f515b56d6434 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.036351] env[62476]: DEBUG nova.policy [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac7bdfcb36fa4acb9cabc17c8af07436', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a18d3ed535a406494d92e596a2b387d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 617.785580] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "ae5723f6-0107-46e8-971d-fca307ce67c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.785869] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d 
tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "ae5723f6-0107-46e8-971d-fca307ce67c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.804481] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 617.915430] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.915708] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.917338] env[62476]: INFO nova.compute.claims [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.136291] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bb748f-7866-4cec-bbeb-d78406d72c51 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.154071] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc57901-689b-41b8-ba70-6aadf169d942 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.199603] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecd11e1-961e-4c7c-8858-a5873b036e84 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.206840] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e74fb7-91be-415c-97b3-8db2163d86b3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.215518] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquiring lock "adf2f380-84ad-480b-aa9a-16b19c05a3f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.215841] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Lock "adf2f380-84ad-480b-aa9a-16b19c05a3f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.231659] env[62476]: DEBUG nova.compute.provider_tree [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.242983] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 618.257760] env[62476]: DEBUG nova.scheduler.client.report [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.281771] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.366s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.282376] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 618.352788] env[62476]: DEBUG nova.compute.utils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 618.354436] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Allocating IP information in the background. 
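The "Allocating IP information in the background" / allocate_for_instance() entries hand network setup off to Neutron, which eventually answers with a "Successfully created port" line further down. Nova drives this through its own Neutron client; a rough standalone equivalent with openstacksdk would look like this (cloud entry and network UUID are placeholders, and this is a sketch of the effect, not Nova's code path):

    import openstack

    conn = openstack.connect(cloud='devstack')  # placeholder clouds.yaml entry

    port = conn.network.create_port(
        network_id='NETWORK_UUID',  # placeholder
        device_id='adf2f380-84ad-480b-aa9a-16b19c05a3f3',  # instance uuid from the log
        device_owner='compute:nova',
    )
    print(port.id)  # surfaces in the log as "Successfully created port: <uuid>"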
{{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 618.354710] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 618.366444] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.366760] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.368571] env[62476]: INFO nova.compute.claims [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.385216] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 618.508775] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Start spawning the instance on the hypervisor. 
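Each "Getting desirable topologies" walk in this log (one per instance, including the one that follows) ends with a single possible topology for a 1-vCPU flavor. A simplified sketch of the enumeration nova.virt.hardware performs (not the actual Nova code; field order in the namedtuple is arbitrary):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every (sockets, cores, threads) factorisation of vcpus within
        # the limits; the 65536 defaults match the "limits were sockets=65536,
        # cores=65536, threads=65536" lines in this log.
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)] -> "Got 1 possible topologies"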
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 618.561033] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.561330] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.561444] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.561622] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.561826] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.561922] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.562832] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.563041] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
618.563538] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.563580] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.563722] env[62476]: DEBUG nova.virt.hardware [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.564651] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594e0e4a-a540-4818-a406-8e2b0cfd3789 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.575468] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9eaa30-2cad-4ecc-b4b9-e876afc8b8cf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.579671] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fe22e5-e51d-450e-aefb-af888b8ca0f5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.612873] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626251c3-4eec-47b3-9a87-0772a3eb2f6a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.655329] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ad7ef3-eb5d-4982-a30a-9b89289fe105 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.664138] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6462deb0-26ca-4d96-8441-c379119c796e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.682302] env[62476]: DEBUG nova.compute.provider_tree [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.699019] env[62476]: DEBUG nova.scheduler.client.report [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.726554] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.360s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.728282] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 618.800474] env[62476]: DEBUG nova.compute.utils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 618.805151] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 618.805151] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 618.829231] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 618.869679] env[62476]: DEBUG nova.policy [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1049e5f09cf0462fa40943bc3f5cc739', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7638c00f848b483283237ea78e8d03fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 618.940255] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Start spawning the instance on the hypervisor. 
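The "Policy check for network:attach_external_network failed" entry above is oslo.policy rejecting a non-admin credential. A sketch of the check, with the credentials abbreviated from the log line (the admin-only rule string here is an assumption of the usual default, not read from this deployment):

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    CONF([], project='sketch')  # defaults only; no policy file needed

    enforcer = policy.Enforcer(CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

    creds = {'is_admin': False,
             'user_id': '1049e5f09cf0462fa40943bc3f5cc739',
             'project_id': '7638c00f848b483283237ea78e8d03fc',
             'roles': ['reader', 'member']}

    try:
        enforcer.authorize('network:attach_external_network', {}, creds,
                           do_raise=True)
    except policy.PolicyNotAuthorized:
        # Nova catches this and logs the DEBUG "Policy check ... failed"
        # line seen above.
        print('policy check failed for member role')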
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 618.981262] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.981611] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.981823] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.982081] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.982419] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.982419] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.982683] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.982891] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.983710] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 
tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.983886] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.984015] env[62476]: DEBUG nova.virt.hardware [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.985258] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a827d3fe-3657-4102-b53d-95a366701738 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.997324] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fc49cc-5cae-4e4b-8006-d3a29429ec8e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.231189] env[62476]: DEBUG nova.policy [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fd247a8a8a74a29b12551aae697a339', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45114e21979046538f506b9e505aa754', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 619.702701] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Successfully created port: 1d921fab-e487-493e-bba8-25dab3763cf9 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.366967] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "187242f5-934b-4c1d-b8ac-2ce8c347351a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.367623] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.394779] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 620.463784] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.464058] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.466114] env[62476]: INFO nova.compute.claims [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 620.618836] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2eebcd9-8633-4b6b-9e11-c63dd5c1e62a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.626968] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345011b7-2b4d-4c65-ae72-6238eb1eebae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.666874] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fcb9d2-6dd0-4758-a75f-ccbaa4c9fc9a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.676033] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d60eb3c-e84a-4009-8489-fe6648c07e15 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.691342] env[62476]: DEBUG nova.compute.provider_tree [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.708495] env[62476]: DEBUG nova.scheduler.client.report [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.728669] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.264s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.728669] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 620.790468] env[62476]: DEBUG nova.compute.utils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.794643] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 620.794643] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 620.812041] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 620.897908] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Start spawning the instance on the hypervisor. 
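[Editor's note] Each claim above is bracketed by 'Acquiring/acquired/released lock "compute_resources"' messages. That is oslo.concurrency's lockutils; a hedged sketch of the pattern (the claim body is a placeholder, not the real resource tracker):

    from oslo_concurrency import lockutils

    def instance_claim(instance_uuid):
        # lockutils.lock() is a context manager; entering and leaving it
        # produces the waited/held timings seen in the DEBUG lines above.
        with lockutils.lock('compute_resources'):
            # Placeholder: test-and-reserve VCPU/MEMORY_MB/DISK_GB for the
            # instance against the provider inventory, then record the claim.
            pass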
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 620.945116] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 620.946925] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 620.946925] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.946925] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 620.946925] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.946925] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 620.948072] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 620.948072] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 620.948365] env[62476]: DEBUG nova.virt.hardware [None 
req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 620.948536] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 620.948783] env[62476]: DEBUG nova.virt.hardware [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.950515] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736ae975-5d81-4aa2-9f8a-f9bb336f4005 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.961013] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b67ba0-0a96-40d4-a7db-6fe128f7b856 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.147510] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Successfully created port: 932450c0-a244-4d32-ab31-854cdaa65b25 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.222819] env[62476]: DEBUG nova.policy [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7744defc4e18426a997eb90233003ce6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a603971d670c488c90de90ed1cdb5109', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 621.869240] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Successfully created port: 7889031d-dea0-4fc2-8763-24aea912707a {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.844243] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Successfully updated port: 1d921fab-e487-493e-bba8-25dab3763cf9 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.872378] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquiring lock "refresh_cache-5f15094d-b066-4025-af5d-4ed35af2dfee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.872654] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquired lock "refresh_cache-5f15094d-b066-4025-af5d-4ed35af2dfee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.872863] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.037042] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Successfully created port: c8ba1038-c494-4b3b-86a1-3d19da5bd797 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.097266] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.340887] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.341136] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.357076] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Starting instance... 
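[Editor's note] The 'Successfully created port' / 'Successfully updated port' pairs are Nova calling Neutron on the instance's behalf. An equivalent standalone call, sketched with openstacksdk rather than Nova's internal client (the cloud entry and port name are hypothetical; the network id echoes the one in the instance's network_info further down):

    import openstack

    conn = openstack.connect(cloud='devstack')  # assumed clouds.yaml entry
    port = conn.network.create_port(
        network_id='29f43b1f-3ea3-468a-bf66-82c60aa160ac',
        name='example-port',
    )
    print(port.id, port.status)  # DOWN until the VIF is bound and plugged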
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 623.427216] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.427673] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.429911] env[62476]: INFO nova.compute.claims [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.628359] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152834b2-20d5-4186-9aab-d483d183abec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.637658] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98942d23-fb67-41f9-b275-6fb0d0f7ec03 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.670826] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caba9cef-61b4-47d7-9db0-604efb12627d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.680636] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31608dd7-d3ad-41a7-9b4c-9dbc43ffb055 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.698208] env[62476]: DEBUG nova.compute.provider_tree [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.712503] env[62476]: DEBUG nova.scheduler.client.report [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.737598] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.308s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.737598] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 623.787468] env[62476]: DEBUG nova.compute.utils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 623.796244] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 623.800665] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 623.814141] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 623.900334] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Start spawning the instance on the hypervisor. 
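[Editor's note] The hardware.py lines that follow ('Build topologies for 1 vcpu(s) 1:1:1', 'Got 1 possible topologies') enumerate every sockets*cores*threads factorisation of the flavor's vCPU count under the 65536 per-dimension limits. A simplified sketch of that enumeration (not Nova's exact code):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            remainder = vcpus // sockets
            for cores in range(1, min(remainder, max_cores) + 1):
                if remainder % cores:
                    continue
                threads = remainder // cores
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))  # [(1, 1, 1)] -- exactly one possible topology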
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 623.935272] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.935745] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.936048] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.936327] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.936545] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.936777] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.937109] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.937344] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.937596] env[62476]: DEBUG 
nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.938046] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.938881] env[62476]: DEBUG nova.virt.hardware [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.940525] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420cf827-954d-48c7-bc8a-b9ec929a199e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.959680] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d5d896-8b11-4e8b-ad27-b7a8441d5caf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.330040] env[62476]: DEBUG nova.policy [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70adefe1b8c940de8e493db0a8218c87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13cea4fc87e04b799f2be7251cf3b45f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 624.570995] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Updating instance_info_cache with network_info: [{"id": "1d921fab-e487-493e-bba8-25dab3763cf9", "address": "fa:16:3e:37:88:26", "network": {"id": "29f43b1f-3ea3-468a-bf66-82c60aa160ac", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1092135988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a18d3ed535a406494d92e596a2b387d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap1d921fab-e4", "ovs_interfaceid": "1d921fab-e487-493e-bba8-25dab3763cf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.593693] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Releasing lock "refresh_cache-5f15094d-b066-4025-af5d-4ed35af2dfee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.594170] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Instance network_info: |[{"id": "1d921fab-e487-493e-bba8-25dab3763cf9", "address": "fa:16:3e:37:88:26", "network": {"id": "29f43b1f-3ea3-468a-bf66-82c60aa160ac", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1092135988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a18d3ed535a406494d92e596a2b387d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d921fab-e4", "ovs_interfaceid": "1d921fab-e487-493e-bba8-25dab3763cf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 624.594545] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:88:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d921fab-e487-493e-bba8-25dab3763cf9', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.611518] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 624.612339] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cdc7a99-426f-4b6c-944f-d729dae797d0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.633956] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Created folder: OpenStack in parent group-v4. [ 624.634195] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Creating folder: Project (0a18d3ed535a406494d92e596a2b387d). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 624.634450] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11721657-8198-42ca-96de-ca1b5e883c3c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.646265] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Created folder: Project (0a18d3ed535a406494d92e596a2b387d) in parent group-v849485. [ 624.646641] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Creating folder: Instances. Parent ref: group-v849486. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 624.646991] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ad9ac48-fab2-49e3-b4e2-9cee6fe85b6b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.659038] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Created folder: Instances in parent group-v849486. [ 624.659038] env[62476]: DEBUG oslo.service.loopingcall [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
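[Editor's note] 'Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return' is oslo.service's looping-call machinery. A self-contained sketch of the primitive (the poll body is illustrative; the task id is the one logged below):

    from oslo_service import loopingcall

    def _poll():
        # Placeholder check; raising LoopingCallDone ends the loop and
        # hands its retvalue back to the waiter.
        raise loopingcall.LoopingCallDone(retvalue='task-4319000')

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    print(timer.start(interval=0.5).wait())  # -> task-4319000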
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.659038] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 624.659321] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a409671-c2b3-4388-bf76-12f4a4c7a3ca {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.687861] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.687861] env[62476]: value = "task-4319000" [ 624.687861] env[62476]: _type = "Task" [ 624.687861] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.698651] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319000, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.940424] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.940824] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.968299] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Starting instance... 
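[Editor's note] The instance-UUID locks above ('Acquiring lock "6c3f0540-..." by "..._locked_do_build_and_run_instance"') come from wrapping an inner function with lockutils.synchronized, which is why the lock holder is reported with the _locked_ prefix. A sketch of the shape (body elided):

    from oslo_concurrency import lockutils

    def build_and_run_instance(instance_uuid):
        @lockutils.synchronized(instance_uuid)
        def _locked_do_build_and_run_instance():
            # Placeholder for claim -> network allocation -> spawn.
            pass
        _locked_do_build_and_run_instance()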
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 625.074393] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.074658] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.076776] env[62476]: INFO nova.compute.claims [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.202592] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319000, 'name': CreateVM_Task, 'duration_secs': 0.356785} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.203397] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 625.293820] env[62476]: DEBUG oslo_vmware.service [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b17fd3-64f5-4642-b09f-e4361574c910 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.310672] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.310881] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.312291] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 625.312685] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5e74322-487a-4093-814f-66b21105666a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.324016] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Waiting for the task: (returnval){ [ 625.324016] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]525c000c-2089-1254-1017-af947ba0a409" [ 625.324016] env[62476]: _type = "Task" [ 625.324016] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.338618] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]525c000c-2089-1254-1017-af947ba0a409, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.352071] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896e4c55-4492-4582-9216-b99490c91a94 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.360746] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51811e01-43e9-4e99-8505-57ce4c26ea3a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.399658] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feceb76f-8a12-4f5d-ad4e-a8d318a5acc9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.406687] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6c3532-eebf-428f-aad8-cef2d579d139 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.421805] env[62476]: DEBUG nova.compute.provider_tree [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.432152] env[62476]: DEBUG nova.scheduler.client.report [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 625.457689] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.383s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.458230] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 625.523160] env[62476]: DEBUG nova.compute.utils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.524548] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 625.524718] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 625.541888] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 625.628499] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Start spawning the instance on the hypervisor. 
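[Editor's note] The recurring 'Policy check for network:attach_external_network failed' lines are oslo.policy denials: the tempest member credentials lack the admin role the rule requires, so Nova skips external networks. A hedged sketch (Enforcer, register_default and enforce are real oslo.policy APIs; the 'role:admin' rule string is an assumption standing in for the actual default):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member']}  # as logged for the tempest user
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # -> False, matching the DEBUG messages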
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 625.657444] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Successfully updated port: 932450c0-a244-4d32-ab31-854cdaa65b25 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 625.676898] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 625.676898] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 625.676898] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 625.681266] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 625.681266] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 625.681266] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 625.681266] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 625.681266] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 625.681429] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 625.681429] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 625.681901] env[62476]: DEBUG nova.virt.hardware [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 625.684466] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9041b5f-a49f-458e-ad12-104a0131b0a0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.690488] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "refresh_cache-ae5723f6-0107-46e8-971d-fca307ce67c8" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 625.691030] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired lock "refresh_cache-ae5723f6-0107-46e8-971d-fca307ce67c8" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 625.691383] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 625.708825] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0918684a-54fc-4d59-b954-ec332d1620f7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.752772] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Successfully updated port: 7889031d-dea0-4fc2-8763-24aea912707a {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
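Editor's note: the topology lines above follow a simple enumeration. With no hw:cpu_* extra specs on the flavor or image, the limits default to 65536 and the preferences to 0, so every sockets/cores/threads combination whose product equals the vCPU count is a candidate. A minimal sketch of that enumeration (a simplified re-implementation for illustration, not Nova's actual hardware.py code):

    # Enumerate CPU topologies for a given vCPU count, mimicking what
    # _get_possible_cpu_topologies reports above. For the 1-vCPU
    # m1.nano flavor the only result is 1 socket x 1 core x 1 thread.
    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]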
[ 625.766801] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquiring lock "refresh_cache-adf2f380-84ad-480b-aa9a-16b19c05a3f3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 625.766801] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquired lock "refresh_cache-adf2f380-84ad-480b-aa9a-16b19c05a3f3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 625.766801] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 625.841544] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 625.842111] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 625.842558] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 625.842861] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 625.843419] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 625.844309] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c49b9e9-aea0-4c90-b076-8ca9d286bb7b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.867509] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 625.867509] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 625.867509] env[62476]: DEBUG nova.policy [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a117f106402424280e477babc21990c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f16c7f1cb3ec41ffbdd622e3ee5992ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}}
[ 625.873466] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab1f694-23d7-4a2e-ab49-8fb5d3d2b958 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.882260] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-236afb1a-fdce-458d-a744-d858b189d227 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 625.888066] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Waiting for the task: (returnval){
[ 625.888066] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52df86c2-f968-51a5-3a11-cac303650797"
[ 625.888066] env[62476]: _type = "Task"
[ 625.888066] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 625.896579] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52df86c2-f968-51a5-3a11-cac303650797, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
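Editor's note: the Acquiring/Acquired/Releasing lock lines above come from oslo.concurrency's named-lock helpers: all code paths that enter lockutils.lock() (or a function decorated with @lockutils.synchronized) under the same name string are serialized within the process. A minimal sketch, assuming a lock name built from an instance UUID as in the log:

    # Sketch of the oslo.concurrency pattern behind the lock log lines.
    # Lock names are plain strings; "refresh_cache-<uuid>" serializes
    # network-info-cache refreshes for a single instance.
    from oslo_concurrency import lockutils

    instance_uuid = "adf2f380-84ad-480b-aa9a-16b19c05a3f3"  # from the log

    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # The library logs "Acquiring lock ..." / "Acquired lock ..."
        # on entry and "Releasing lock ..." on exit, as seen above.
        pass  # refresh and store the instance's network info here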
[ 625.897673] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 626.039354] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.040103] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.040399] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}}
[ 626.042472] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 626.076543] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 626.076674] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 626.076839] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 626.076930] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 626.077026] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 626.077162] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 626.077288] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
[ 626.077915] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.078227] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.078429] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.078619] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.078808] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.078989] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
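Editor's note: the burst of "Running periodic task ComputeManager._..." lines is oslo.service's periodic-task machinery walking every method the manager registered. A sketch of the registration pattern (illustrative; the real ComputeManager defines many more tasks and spacing options):

    # Methods decorated with @periodic_task.periodic_task on a
    # PeriodicTasks subclass are collected at class-definition time and
    # invoked together by run_periodic_tasks(), which emits one
    # "Running periodic task ..." debug line per method.
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task
        def _poll_rebooting_instances(self, context):
            pass  # the real task polls and handles rebooting instances

        @periodic_task.periodic_task(spacing=60)
        def _check_instance_build_time(self, context):
            pass  # runs at most once per 60 seconds

    # mgr = Manager(conf); mgr.run_periodic_tasks(context)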
[ 626.079173] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
[ 626.079318] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 626.094009] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 626.094241] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 626.094417] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 626.094575] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 626.095795] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dcc3a8-d747-435b-8a96-6d4d5ad240fe {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.099999] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 626.109485] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ff3136-6fe5-4efc-8db9-4d79b00c75b4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.135529] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce8e3e8-aadb-4423-be3f-e3e2e152c6f2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.144965] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1a2971-5a64-4aa2-a66b-630790c1d919 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.180135] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180682MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 626.180135] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 626.180323] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 626.291932] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5f15094d-b066-4025-af5d-4ed35af2dfee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.291932] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ae5723f6-0107-46e8-971d-fca307ce67c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.292037] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance adf2f380-84ad-480b-aa9a-16b19c05a3f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.292166] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 187242f5-934b-4c1d-b8ac-2ce8c347351a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.292287] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.292407] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 626.293523] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 626.293523] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
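Editor's note: the usage figures in the final resource view are just the six m1.nano allocations listed above summed, plus the 512 MB of host-reserved memory from the MEMORY_MB inventory reported a little further down. A quick cross-check:

    # Cross-check of the resource tracker's "Final resource view":
    # six instances, each allocated {DISK_GB: 1, MEMORY_MB: 128, VCPU: 1},
    # plus the 512 MB reserved in the MEMORY_MB inventory below.
    instances = 6
    alloc = {"MEMORY_MB": 128, "DISK_GB": 1, "VCPU": 1}
    reserved_ram_mb = 512

    used_ram = reserved_ram_mb + instances * alloc["MEMORY_MB"]   # 1280
    used_disk = instances * alloc["DISK_GB"]                      # 6
    used_vcpus = instances * alloc["VCPU"]                        # 6

    assert (used_ram, used_disk, used_vcpus) == (1280, 6, 6)  # matches the log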
[ 626.410932] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 626.411097] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Creating directory with path [datastore1] vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 626.411658] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a956661b-980f-4eea-8289-c5a67aa7b198 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.423570] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47106b1e-0787-4913-af87-ca97a4632fa7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.436551] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ceeb725-f830-4307-a9e9-4e9e0ba75af3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.439677] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Created directory with path [datastore1] vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 626.440576] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Fetch image to [datastore1] vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 626.440576] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 626.440798] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732ae006-9c5c-4147-973f-7ec9481620a4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.486252] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226c213e-d75a-4cd3-b5b9-23432f1f348b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.488957] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfff3bf-bde9-488b-9b4c-6f346bc75fcb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.502116] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3db72a-244f-4ead-93cd-436c7126e7a0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.507470] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306a883d-1b37-4495-bcd7-2ccd2246697d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.553355] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 626.557149] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945050dc-ace1-44af-86e2-84cdf5db4bec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.563669] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f06b061d-cb35-4e6e-8258-ec7d617380cc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 626.567724] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 626.589711] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 626.589711] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.409s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 626.603604] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 626.695698] env[62476]: DEBUG oslo_vmware.rw_handles [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 626.761099] env[62476]: DEBUG oslo_vmware.rw_handles [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
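Editor's note: the rw_handles lines above stream the 21,318,656-byte VMDK to the ESX host over vSphere's HTTP file access: a PUT to the host's /folder endpoint, with the in-datastore path in the URL and the datacenter and datastore named in the query string. A rough sketch of the same transfer (placeholder host, paths, and file names; oslo.vmware's real handler also manages service tickets, cookies, and TLS verification):

    # Illustrative upload to a vSphere datastore via the HTTP file
    # access endpoint, mirroring the URL shape in the log above.
    import requests

    url = ("https://esx.example.org:443/folder/"
           "vmware_temp/some-upload-dir/some-image-id/tmp-sparse.vmdk")
    params = {"dcPath": "ha-datacenter", "dsName": "datastore1"}

    with open("tmp-sparse.vmdk", "rb") as f:
        resp = requests.put(
            url, params=params, data=f,
            headers={"Content-Type": "application/octet-stream"},
            verify=False)  # sketch only; verify certificates in real code
    resp.raise_for_status()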
[ 626.761848] env[62476]: DEBUG oslo_vmware.rw_handles [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 626.995396] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Successfully created port: f7ae5617-fde4-48d5-83f3-0a90b2e50a9f {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 627.300852] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Updating instance_info_cache with network_info: [{"id": "932450c0-a244-4d32-ab31-854cdaa65b25", "address": "fa:16:3e:67:46:cc", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap932450c0-a2", "ovs_interfaceid": "932450c0-a244-4d32-ab31-854cdaa65b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 627.322356] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Releasing lock "refresh_cache-ae5723f6-0107-46e8-971d-fca307ce67c8" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 627.324415] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Instance network_info: |[{"id": "932450c0-a244-4d32-ab31-854cdaa65b25", "address": "fa:16:3e:67:46:cc", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap932450c0-a2", "ovs_interfaceid": "932450c0-a244-4d32-ab31-854cdaa65b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 627.324666] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:46:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '932450c0-a244-4d32-ab31-854cdaa65b25', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 627.337444] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Creating folder: Project (7638c00f848b483283237ea78e8d03fc). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 627.338129] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-befaa067-35f1-421c-87f7-1d9115086187 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.353733] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Created folder: Project (7638c00f848b483283237ea78e8d03fc) in parent group-v849485.
[ 627.353733] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Creating folder: Instances. Parent ref: group-v849489. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 627.354903] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bea244d6-e49d-47b3-8797-e9bb66b29b6b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.366788] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Created folder: Instances in parent group-v849489.
[ 627.367955] env[62476]: DEBUG oslo.service.loopingcall [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 627.367955] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 627.367955] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6355f8ba-e000-4e3d-994a-05f962dd761a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.392609] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 627.392609] env[62476]: value = "task-4319003"
[ 627.392609] env[62476]: _type = "Task"
[ 627.392609] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 627.401331] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319003, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 627.463671] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Updating instance_info_cache with network_info: [{"id": "7889031d-dea0-4fc2-8763-24aea912707a", "address": "fa:16:3e:86:ed:e0", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7889031d-de", "ovs_interfaceid": "7889031d-dea0-4fc2-8763-24aea912707a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 627.482321] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Releasing lock "refresh_cache-adf2f380-84ad-480b-aa9a-16b19c05a3f3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 627.485351] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Instance network_info: |[{"id": "7889031d-dea0-4fc2-8763-24aea912707a", "address": "fa:16:3e:86:ed:e0", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7889031d-de", "ovs_interfaceid": "7889031d-dea0-4fc2-8763-24aea912707a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 627.485891] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:ed:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7889031d-dea0-4fc2-8763-24aea912707a', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 627.498254] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Creating folder: Project (45114e21979046538f506b9e505aa754). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 627.498991] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-483cc2cc-7b4d-4e17-8afb-85af68053d14 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.513911] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Created folder: Project (45114e21979046538f506b9e505aa754) in parent group-v849485.
[ 627.513911] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Creating folder: Instances. Parent ref: group-v849492. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 627.513911] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e244c85-e8e9-4943-92b7-1b3715416ffa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.525625] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Created folder: Instances in parent group-v849492.
[ 627.525881] env[62476]: DEBUG oslo.service.loopingcall [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 627.526131] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 627.526413] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03e13dbe-c2b3-4425-81b9-a96ea4f3d079 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 627.546863] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 627.546863] env[62476]: value = "task-4319006"
[ 627.546863] env[62476]: _type = "Task"
[ 627.546863] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 627.555669] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319006, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 627.918974] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319003, 'name': CreateVM_Task, 'duration_secs': 0.450588} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 627.918974] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 627.919758] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 627.919919] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 627.920420] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 627.920784] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21466640-30ba-4e86-91b3-cfcf3f4c4bb8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
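Editor's note: the "Waiting for the task ... progress is 0% ... completed successfully" sequences around these CreateVM_Task and SearchDatastore_Task calls are oslo.vmware's task-polling loop. A minimal sketch of that pattern (placeholder vCenter endpoint and credentials; the managed-object arguments to the actual task invocation are elided):

    # oslo.vmware submits a vSphere method that returns a Task managed
    # object, then wait_for_task() polls it at task_poll_interval until
    # it reaches a terminal state, logging progress along the way.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        "vc.example.org", "user", "secret",
        api_retry_count=10, task_poll_interval=0.5)

    # Any task-returning call can be awaited the same way, e.g.:
    # task = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
    #                           config=config_spec, pool=respool_ref)
    # result = session.wait_for_task(task)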
[ 627.933856] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for the task: (returnval){
[ 627.933856] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52c8ca4d-5d7f-045b-7308-7300fb13d705"
[ 627.933856] env[62476]: _type = "Task"
[ 627.933856] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 627.946279] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52c8ca4d-5d7f-045b-7308-7300fb13d705, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 628.026842] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Successfully updated port: c8ba1038-c494-4b3b-86a1-3d19da5bd797 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 628.052923] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "refresh_cache-187242f5-934b-4c1d-b8ac-2ce8c347351a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 628.053140] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquired lock "refresh_cache-187242f5-934b-4c1d-b8ac-2ce8c347351a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 628.053328] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 628.078102] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319006, 'name': CreateVM_Task, 'duration_secs': 0.397905} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 628.078300] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 628.079358] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 628.107269] env[62476]: DEBUG nova.compute.manager [req-b67c4089-f58a-40f4-a6bd-e997672ab1b6 req-3a9cae22-eb5c-4c46-86d7-ee1bb7ffd6ac service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Received event network-vif-plugged-1d921fab-e487-493e-bba8-25dab3763cf9 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 628.107947] env[62476]: DEBUG oslo_concurrency.lockutils [req-b67c4089-f58a-40f4-a6bd-e997672ab1b6 req-3a9cae22-eb5c-4c46-86d7-ee1bb7ffd6ac service nova] Acquiring lock "5f15094d-b066-4025-af5d-4ed35af2dfee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 628.107947] env[62476]: DEBUG oslo_concurrency.lockutils [req-b67c4089-f58a-40f4-a6bd-e997672ab1b6 req-3a9cae22-eb5c-4c46-86d7-ee1bb7ffd6ac service nova] Lock "5f15094d-b066-4025-af5d-4ed35af2dfee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 628.112150] env[62476]: DEBUG oslo_concurrency.lockutils [req-b67c4089-f58a-40f4-a6bd-e997672ab1b6 req-3a9cae22-eb5c-4c46-86d7-ee1bb7ffd6ac service nova] Lock "5f15094d-b066-4025-af5d-4ed35af2dfee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 628.112480] env[62476]: DEBUG nova.compute.manager [req-b67c4089-f58a-40f4-a6bd-e997672ab1b6 req-3a9cae22-eb5c-4c46-86d7-ee1bb7ffd6ac service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] No waiting events found dispatching network-vif-plugged-1d921fab-e487-493e-bba8-25dab3763cf9 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 628.112675] env[62476]: WARNING nova.compute.manager [req-b67c4089-f58a-40f4-a6bd-e997672ab1b6 req-3a9cae22-eb5c-4c46-86d7-ee1bb7ffd6ac service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Received unexpected event network-vif-plugged-1d921fab-e487-493e-bba8-25dab3763cf9 for instance with vm_state building and task_state spawning.
[ 628.400890] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 628.450573] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 628.454025] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 628.454025] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 628.454025] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 628.454025] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 628.454663] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e258bb9-7688-40ff-94c6-ccf940ecc0ec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 628.460507] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Waiting for the task: (returnval){
[ 628.460507] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52a3e7cc-599a-cfa8-da1e-1cc674e536b7"
[ 628.460507] env[62476]: _type = "Task"
[ 628.460507] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 628.475364] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52a3e7cc-599a-cfa8-da1e-1cc674e536b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 628.899540] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Successfully created port: 3bb9dfed-e100-49ab-baec-a47877525f4c {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 628.979994] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 628.981584] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 628.981825] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 629.479311] env[62476]: DEBUG nova.compute.manager [req-06899377-3afe-4709-9fe7-0085c35c5f88 req-0a03a371-7eb7-4e4f-b38e-e178e01294fe service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Received event network-vif-plugged-7889031d-dea0-4fc2-8763-24aea912707a {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 629.479550] env[62476]: DEBUG oslo_concurrency.lockutils [req-06899377-3afe-4709-9fe7-0085c35c5f88 req-0a03a371-7eb7-4e4f-b38e-e178e01294fe service nova] Acquiring lock "adf2f380-84ad-480b-aa9a-16b19c05a3f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 629.479717] env[62476]: DEBUG oslo_concurrency.lockutils [req-06899377-3afe-4709-9fe7-0085c35c5f88 req-0a03a371-7eb7-4e4f-b38e-e178e01294fe service nova] Lock "adf2f380-84ad-480b-aa9a-16b19c05a3f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 629.479882] env[62476]: DEBUG oslo_concurrency.lockutils [req-06899377-3afe-4709-9fe7-0085c35c5f88 req-0a03a371-7eb7-4e4f-b38e-e178e01294fe service nova] Lock "adf2f380-84ad-480b-aa9a-16b19c05a3f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 629.481120] env[62476]: DEBUG nova.compute.manager [req-06899377-3afe-4709-9fe7-0085c35c5f88 req-0a03a371-7eb7-4e4f-b38e-e178e01294fe service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] No waiting events found dispatching network-vif-plugged-7889031d-dea0-4fc2-8763-24aea912707a {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 629.481373] env[62476]: WARNING nova.compute.manager [req-06899377-3afe-4709-9fe7-0085c35c5f88 req-0a03a371-7eb7-4e4f-b38e-e178e01294fe service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Received unexpected event network-vif-plugged-7889031d-dea0-4fc2-8763-24aea912707a for instance with vm_state building and task_state spawning.
[ 629.773510] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Updating instance_info_cache with network_info: [{"id": "c8ba1038-c494-4b3b-86a1-3d19da5bd797", "address": "fa:16:3e:e0:ae:9e", "network": {"id": "2c2a0b25-bf6c-4475-8686-c82d28341dcd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1958255649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a603971d670c488c90de90ed1cdb5109", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8ba1038-c4", "ovs_interfaceid": "c8ba1038-c494-4b3b-86a1-3d19da5bd797", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 629.796609] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Releasing lock "refresh_cache-187242f5-934b-4c1d-b8ac-2ce8c347351a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 629.802029] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Instance network_info: |[{"id": "c8ba1038-c494-4b3b-86a1-3d19da5bd797", "address": "fa:16:3e:e0:ae:9e", "network": {"id": "2c2a0b25-bf6c-4475-8686-c82d28341dcd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1958255649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a603971d670c488c90de90ed1cdb5109", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id":
"nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8ba1038-c4", "ovs_interfaceid": "c8ba1038-c494-4b3b-86a1-3d19da5bd797", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 629.802163] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:ae:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77ccbd87-ecfd-4b2d-a1ea-29774addcef6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8ba1038-c494-4b3b-86a1-3d19da5bd797', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 629.813993] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Creating folder: Project (a603971d670c488c90de90ed1cdb5109). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 629.814935] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-826064ea-7239-4ccd-9065-14d13cc7ee90 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.832335] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Created folder: Project (a603971d670c488c90de90ed1cdb5109) in parent group-v849485. [ 629.832335] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Creating folder: Instances. Parent ref: group-v849495. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 629.832335] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c533fc40-2507-4109-a210-c9961d90cabd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.844794] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Created folder: Instances in parent group-v849495. [ 629.845074] env[62476]: DEBUG oslo.service.loopingcall [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 629.845281] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 629.845497] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a818199-ac5b-4b99-aaee-8a37d83fcb2f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.872994] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 629.872994] env[62476]: value = "task-4319009" [ 629.872994] env[62476]: _type = "Task" [ 629.872994] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.883667] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319009, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.387253] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319009, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.895843] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319009, 'name': CreateVM_Task, 'duration_secs': 0.544084} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.896104] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 630.897775] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.897983] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.898323] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 630.898590] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c64ac97d-962f-463a-b0c1-3bf86572997d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.906040] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Waiting for the task: (returnval){ [ 630.906040] 
env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52baefe0-0483-a0dd-53dc-f053e04a9f4c" [ 630.906040] env[62476]: _type = "Task" [ 630.906040] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.931145] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.931459] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.931665] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.224125] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Successfully updated port: f7ae5617-fde4-48d5-83f3-0a90b2e50a9f {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 632.242453] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "refresh_cache-1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.242453] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquired lock "refresh_cache-1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.242572] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 632.334536] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Received event network-changed-1d921fab-e487-493e-bba8-25dab3763cf9 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 632.334856] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 
service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Refreshing instance network info cache due to event network-changed-1d921fab-e487-493e-bba8-25dab3763cf9. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 632.334986] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquiring lock "refresh_cache-5f15094d-b066-4025-af5d-4ed35af2dfee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.335323] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquired lock "refresh_cache-5f15094d-b066-4025-af5d-4ed35af2dfee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.335518] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Refreshing network info cache for port 1d921fab-e487-493e-bba8-25dab3763cf9 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 632.488100] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.893164] env[62476]: DEBUG nova.compute.manager [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Received event network-changed-7889031d-dea0-4fc2-8763-24aea912707a {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 633.893164] env[62476]: DEBUG nova.compute.manager [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Refreshing instance network info cache due to event network-changed-7889031d-dea0-4fc2-8763-24aea912707a. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 633.893164] env[62476]: DEBUG oslo_concurrency.lockutils [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] Acquiring lock "refresh_cache-adf2f380-84ad-480b-aa9a-16b19c05a3f3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.893164] env[62476]: DEBUG oslo_concurrency.lockutils [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] Acquired lock "refresh_cache-adf2f380-84ad-480b-aa9a-16b19c05a3f3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.893164] env[62476]: DEBUG nova.network.neutron [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Refreshing network info cache for port 7889031d-dea0-4fc2-8763-24aea912707a {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 634.038713] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Updating instance_info_cache with network_info: [{"id": "f7ae5617-fde4-48d5-83f3-0a90b2e50a9f", "address": "fa:16:3e:28:37:ec", "network": {"id": "1292eb06-5ab4-4098-9dd9-1f76f1e2a20c", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-599252121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cea4fc87e04b799f2be7251cf3b45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b91b49a8-b849-4d0c-97f7-74fdcd88ae03", "external-id": "nsx-vlan-transportzone-406", "segmentation_id": 406, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7ae5617-fd", "ovs_interfaceid": "f7ae5617-fde4-48d5-83f3-0a90b2e50a9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.074197] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Releasing lock "refresh_cache-1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.074197] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Instance network_info: |[{"id": "f7ae5617-fde4-48d5-83f3-0a90b2e50a9f", "address": "fa:16:3e:28:37:ec", "network": {"id": "1292eb06-5ab4-4098-9dd9-1f76f1e2a20c", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-599252121-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cea4fc87e04b799f2be7251cf3b45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b91b49a8-b849-4d0c-97f7-74fdcd88ae03", "external-id": "nsx-vlan-transportzone-406", "segmentation_id": 406, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7ae5617-fd", "ovs_interfaceid": "f7ae5617-fde4-48d5-83f3-0a90b2e50a9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 634.075440] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:37:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b91b49a8-b849-4d0c-97f7-74fdcd88ae03', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7ae5617-fde4-48d5-83f3-0a90b2e50a9f', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 634.083391] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Creating folder: Project (13cea4fc87e04b799f2be7251cf3b45f). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 634.084572] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08dcc21a-5c80-45ea-a3be-909766b51932 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.096813] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Created folder: Project (13cea4fc87e04b799f2be7251cf3b45f) in parent group-v849485. [ 634.097083] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Creating folder: Instances. Parent ref: group-v849498. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 634.101457] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f90171ac-a7da-4f54-b09e-df053813820e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.114099] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Created folder: Instances in parent group-v849498. 
[ 634.115128] env[62476]: DEBUG oslo.service.loopingcall [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 634.115283] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 634.115615] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f4927ce-c0ea-4617-97c7-83d2d76476c1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.141746] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 634.141746] env[62476]: value = "task-4319012" [ 634.141746] env[62476]: _type = "Task" [ 634.141746] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.151921] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319012, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.417888] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Successfully updated port: 3bb9dfed-e100-49ab-baec-a47877525f4c {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 634.439742] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "refresh_cache-6c3f0540-a722-4a13-9982-f40c2d6ce9b1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.439918] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "refresh_cache-6c3f0540-a722-4a13-9982-f40c2d6ce9b1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.440107] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 634.653994] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319012, 'name': CreateVM_Task, 'duration_secs': 0.491115} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.657265] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 634.657265] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.657265] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.657265] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 634.657265] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71cd0990-ec68-407a-8c0f-f30d32f9a045 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.661994] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Waiting for the task: (returnval){ [ 634.661994] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]523cf965-a6fb-f584-94cf-0f0da3c0229c" [ 634.661994] env[62476]: _type = "Task" [ 634.661994] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.673601] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]523cf965-a6fb-f584-94cf-0f0da3c0229c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.686822] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.788934] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Updated VIF entry in instance network info cache for port 1d921fab-e487-493e-bba8-25dab3763cf9. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 634.789326] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Updating instance_info_cache with network_info: [{"id": "1d921fab-e487-493e-bba8-25dab3763cf9", "address": "fa:16:3e:37:88:26", "network": {"id": "29f43b1f-3ea3-468a-bf66-82c60aa160ac", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1092135988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0a18d3ed535a406494d92e596a2b387d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d921fab-e4", "ovs_interfaceid": "1d921fab-e487-493e-bba8-25dab3763cf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.800747] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Releasing lock "refresh_cache-5f15094d-b066-4025-af5d-4ed35af2dfee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.801050] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Received event network-vif-plugged-932450c0-a244-4d32-ab31-854cdaa65b25 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 634.801252] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquiring lock "ae5723f6-0107-46e8-971d-fca307ce67c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.801456] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Lock "ae5723f6-0107-46e8-971d-fca307ce67c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.801618] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Lock "ae5723f6-0107-46e8-971d-fca307ce67c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.801784] 
env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] No waiting events found dispatching network-vif-plugged-932450c0-a244-4d32-ab31-854cdaa65b25 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 634.801987] env[62476]: WARNING nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Received unexpected event network-vif-plugged-932450c0-a244-4d32-ab31-854cdaa65b25 for instance with vm_state building and task_state spawning. [ 634.802178] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Received event network-changed-932450c0-a244-4d32-ab31-854cdaa65b25 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 634.802525] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Refreshing instance network info cache due to event network-changed-932450c0-a244-4d32-ab31-854cdaa65b25. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 634.802525] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquiring lock "refresh_cache-ae5723f6-0107-46e8-971d-fca307ce67c8" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.802640] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquired lock "refresh_cache-ae5723f6-0107-46e8-971d-fca307ce67c8" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.802794] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Refreshing network info cache for port 932450c0-a244-4d32-ab31-854cdaa65b25 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.178786] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.178786] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 635.178786] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.420683] env[62476]: DEBUG nova.network.neutron [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Updated VIF entry in instance network info cache for port 7889031d-dea0-4fc2-8763-24aea912707a. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 635.420973] env[62476]: DEBUG nova.network.neutron [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Updating instance_info_cache with network_info: [{"id": "7889031d-dea0-4fc2-8763-24aea912707a", "address": "fa:16:3e:86:ed:e0", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7889031d-de", "ovs_interfaceid": "7889031d-dea0-4fc2-8763-24aea912707a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.435588] env[62476]: DEBUG oslo_concurrency.lockutils [req-d8206565-9d30-424a-9682-d940bbc0a6bd req-dda01230-8c9a-4caf-b3d5-369d5647168d service nova] Releasing lock "refresh_cache-adf2f380-84ad-480b-aa9a-16b19c05a3f3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.732274] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Updating instance_info_cache with network_info: [{"id": "3bb9dfed-e100-49ab-baec-a47877525f4c", "address": "fa:16:3e:87:4b:b2", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": 
"nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb9dfed-e1", "ovs_interfaceid": "3bb9dfed-e100-49ab-baec-a47877525f4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.747266] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "refresh_cache-6c3f0540-a722-4a13-9982-f40c2d6ce9b1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.747847] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Instance network_info: |[{"id": "3bb9dfed-e100-49ab-baec-a47877525f4c", "address": "fa:16:3e:87:4b:b2", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb9dfed-e1", "ovs_interfaceid": "3bb9dfed-e100-49ab-baec-a47877525f4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 635.751159] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:4b:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3734b156-0f7d-4721-b23c-d000412ec2eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bb9dfed-e100-49ab-baec-a47877525f4c', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 635.766165] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating folder: Project (f16c7f1cb3ec41ffbdd622e3ee5992ec). Parent ref: group-v849485. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 635.766165] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd142d02-5c22-482d-a474-6c58b63c36c4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.781783] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created folder: Project (f16c7f1cb3ec41ffbdd622e3ee5992ec) in parent group-v849485. [ 635.781783] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating folder: Instances. Parent ref: group-v849501. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 635.781783] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1b85292-add2-47d4-8aea-b8e06378e6b4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.793062] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created folder: Instances in parent group-v849501. [ 635.793062] env[62476]: DEBUG oslo.service.loopingcall [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.793062] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 635.793062] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f6cb381-2391-47cd-979d-5b41e43cf2ef {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.818035] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 635.818035] env[62476]: value = "task-4319015" [ 635.818035] env[62476]: _type = "Task" [ 635.818035] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.831498] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319015, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.172889] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Updated VIF entry in instance network info cache for port 932450c0-a244-4d32-ab31-854cdaa65b25. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 636.173499] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Updating instance_info_cache with network_info: [{"id": "932450c0-a244-4d32-ab31-854cdaa65b25", "address": "fa:16:3e:67:46:cc", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap932450c0-a2", "ovs_interfaceid": "932450c0-a244-4d32-ab31-854cdaa65b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.196227] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Releasing lock "refresh_cache-ae5723f6-0107-46e8-971d-fca307ce67c8" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.196227] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Received event network-vif-plugged-c8ba1038-c494-4b3b-86a1-3d19da5bd797 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 636.196227] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquiring lock "187242f5-934b-4c1d-b8ac-2ce8c347351a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.196227] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.196705] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.196705] env[62476]: DEBUG nova.compute.manager 
[req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] No waiting events found dispatching network-vif-plugged-c8ba1038-c494-4b3b-86a1-3d19da5bd797 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 636.196705] env[62476]: WARNING nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Received unexpected event network-vif-plugged-c8ba1038-c494-4b3b-86a1-3d19da5bd797 for instance with vm_state building and task_state spawning. [ 636.196912] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Received event network-changed-c8ba1038-c494-4b3b-86a1-3d19da5bd797 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 636.197101] env[62476]: DEBUG nova.compute.manager [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Refreshing instance network info cache due to event network-changed-c8ba1038-c494-4b3b-86a1-3d19da5bd797. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 636.197324] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquiring lock "refresh_cache-187242f5-934b-4c1d-b8ac-2ce8c347351a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.197475] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Acquired lock "refresh_cache-187242f5-934b-4c1d-b8ac-2ce8c347351a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.197656] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Refreshing network info cache for port c8ba1038-c494-4b3b-86a1-3d19da5bd797 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 636.333382] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319015, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.343462] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "760f3c9b-044d-4593-bc97-535ac09c3f3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.343462] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.380226] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 636.453414] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.453781] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.455128] env[62476]: INFO nova.compute.claims [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.682083] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b686d57-b064-4b75-9d28-7c4d78053e55 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.694734] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2d2bd9-bf2d-471a-b81e-98017ff14322 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.735901] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f7a579-cfc7-4e97-b5ee-a72bd344c7c8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.749390] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bdcac741-d7d0-4a53-976e-2c803db3a6ff {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.767021] env[62476]: DEBUG nova.compute.provider_tree [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.784293] env[62476]: DEBUG nova.scheduler.client.report [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.804857] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.351s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.804902] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 636.835616] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319015, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.850859] env[62476]: DEBUG nova.compute.utils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 636.851946] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Allocating IP information in the background. 
{{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 636.856040] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 636.868597] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 636.983610] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 637.028990] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 637.029411] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 637.029503] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.029687] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 637.029840] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.030025] 
env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 637.030324] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 637.030425] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 637.030681] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 637.030782] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 637.031028] env[62476]: DEBUG nova.virt.hardware [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 637.032283] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8396f1ea-c08c-4a0c-9e40-ef8f8d27dddb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.041860] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ab3c95-f756-4a76-bcda-dcab2fbf5fb7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.161058] env[62476]: DEBUG nova.policy [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '376465e8e1f0486fb4005dcf52e3a540', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f61870de4af4bd98fc7608e2e7a81a8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 637.335481] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319015, 'name': CreateVM_Task, 
'duration_secs': 1.439026} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.335728] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 637.336359] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.336646] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.336997] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 637.337281] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac0806f7-e320-4d5f-8683-62b61ff40d2f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.345236] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 637.345236] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52bc846d-b6ea-1237-db89-16bc6f6a57dd" [ 637.345236] env[62476]: _type = "Task" [ 637.345236] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.360866] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52bc846d-b6ea-1237-db89-16bc6f6a57dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.486399] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Updated VIF entry in instance network info cache for port c8ba1038-c494-4b3b-86a1-3d19da5bd797. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 637.486399] env[62476]: DEBUG nova.network.neutron [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Updating instance_info_cache with network_info: [{"id": "c8ba1038-c494-4b3b-86a1-3d19da5bd797", "address": "fa:16:3e:e0:ae:9e", "network": {"id": "2c2a0b25-bf6c-4475-8686-c82d28341dcd", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1958255649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a603971d670c488c90de90ed1cdb5109", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77ccbd87-ecfd-4b2d-a1ea-29774addcef6", "external-id": "nsx-vlan-transportzone-385", "segmentation_id": 385, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8ba1038-c4", "ovs_interfaceid": "c8ba1038-c494-4b3b-86a1-3d19da5bd797", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.500955] env[62476]: DEBUG oslo_concurrency.lockutils [req-cd6b30e9-5c42-4a28-b352-a5fc259918ea req-17352ed4-a267-417c-8459-dd110867fe40 service nova] Releasing lock "refresh_cache-187242f5-934b-4c1d-b8ac-2ce8c347351a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.581773] env[62476]: DEBUG nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Received event network-vif-plugged-f7ae5617-fde4-48d5-83f3-0a90b2e50a9f {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 637.582189] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Acquiring lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.582226] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.582421] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.582605] env[62476]: DEBUG 
nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] No waiting events found dispatching network-vif-plugged-f7ae5617-fde4-48d5-83f3-0a90b2e50a9f {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 637.582835] env[62476]: WARNING nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Received unexpected event network-vif-plugged-f7ae5617-fde4-48d5-83f3-0a90b2e50a9f for instance with vm_state building and task_state spawning. [ 637.583016] env[62476]: DEBUG nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Received event network-changed-f7ae5617-fde4-48d5-83f3-0a90b2e50a9f {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 637.583440] env[62476]: DEBUG nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Refreshing instance network info cache due to event network-changed-f7ae5617-fde4-48d5-83f3-0a90b2e50a9f. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 637.583486] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Acquiring lock "refresh_cache-1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.583623] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Acquired lock "refresh_cache-1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.583922] env[62476]: DEBUG nova.network.neutron [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Refreshing network info cache for port f7ae5617-fde4-48d5-83f3-0a90b2e50a9f {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 637.864640] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.867336] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 637.867336] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.612132] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Successfully created port: a8b0be21-1051-4e25-a2d0-ae49f7a396ed {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.725862] env[62476]: DEBUG nova.network.neutron [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Updated VIF entry in instance network info cache for port f7ae5617-fde4-48d5-83f3-0a90b2e50a9f. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 639.728146] env[62476]: DEBUG nova.network.neutron [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Updating instance_info_cache with network_info: [{"id": "f7ae5617-fde4-48d5-83f3-0a90b2e50a9f", "address": "fa:16:3e:28:37:ec", "network": {"id": "1292eb06-5ab4-4098-9dd9-1f76f1e2a20c", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-599252121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13cea4fc87e04b799f2be7251cf3b45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b91b49a8-b849-4d0c-97f7-74fdcd88ae03", "external-id": "nsx-vlan-transportzone-406", "segmentation_id": 406, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7ae5617-fd", "ovs_interfaceid": "f7ae5617-fde4-48d5-83f3-0a90b2e50a9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.741458] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Releasing lock "refresh_cache-1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.742597] env[62476]: DEBUG nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Received event network-vif-plugged-3bb9dfed-e100-49ab-baec-a47877525f4c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 639.742905] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Acquiring lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.743625] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.744628] env[62476]: DEBUG oslo_concurrency.lockutils [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.744817] env[62476]: DEBUG nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] No waiting events found dispatching network-vif-plugged-3bb9dfed-e100-49ab-baec-a47877525f4c {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 639.744991] env[62476]: WARNING nova.compute.manager [req-26b72ae4-5045-47ac-8f7b-bfdb8ca1d343 req-44f40620-0ba1-4bad-af3c-bf20f92386ec service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Received unexpected event network-vif-plugged-3bb9dfed-e100-49ab-baec-a47877525f4c for instance with vm_state building and task_state spawning. [ 642.441740] env[62476]: DEBUG nova.compute.manager [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Received event network-changed-3bb9dfed-e100-49ab-baec-a47877525f4c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 642.442230] env[62476]: DEBUG nova.compute.manager [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Refreshing instance network info cache due to event network-changed-3bb9dfed-e100-49ab-baec-a47877525f4c. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 642.442230] env[62476]: DEBUG oslo_concurrency.lockutils [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] Acquiring lock "refresh_cache-6c3f0540-a722-4a13-9982-f40c2d6ce9b1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.442396] env[62476]: DEBUG oslo_concurrency.lockutils [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] Acquired lock "refresh_cache-6c3f0540-a722-4a13-9982-f40c2d6ce9b1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.442486] env[62476]: DEBUG nova.network.neutron [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Refreshing network info cache for port 3bb9dfed-e100-49ab-baec-a47877525f4c {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 642.653265] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Successfully updated port: a8b0be21-1051-4e25-a2d0-ae49f7a396ed {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 642.674066] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "refresh_cache-760f3c9b-044d-4593-bc97-535ac09c3f3b" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.674255] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquired lock "refresh_cache-760f3c9b-044d-4593-bc97-535ac09c3f3b" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.674408] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.783279] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.319801] env[62476]: DEBUG nova.network.neutron [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Updated VIF entry in instance network info cache for port 3bb9dfed-e100-49ab-baec-a47877525f4c. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 643.320111] env[62476]: DEBUG nova.network.neutron [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Updating instance_info_cache with network_info: [{"id": "3bb9dfed-e100-49ab-baec-a47877525f4c", "address": "fa:16:3e:87:4b:b2", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bb9dfed-e1", "ovs_interfaceid": "3bb9dfed-e100-49ab-baec-a47877525f4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.344098] env[62476]: DEBUG oslo_concurrency.lockutils [req-cf7993f3-49c4-4935-9a30-d9f148323bec req-6fc102e8-56bc-497e-8ae0-bb36c2f425fe service nova] Releasing lock "refresh_cache-6c3f0540-a722-4a13-9982-f40c2d6ce9b1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.466762] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Updating instance_info_cache with network_info: [{"id": "a8b0be21-1051-4e25-a2d0-ae49f7a396ed", "address": "fa:16:3e:de:bf:b7", "network": {"id": "7c53e6ec-f9a3-4a55-a6c2-57a6fc1afd1e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-595111746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f61870de4af4bd98fc7608e2e7a81a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8b0be21-10", "ovs_interfaceid": "a8b0be21-1051-4e25-a2d0-ae49f7a396ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.485347] env[62476]: DEBUG 
oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Releasing lock "refresh_cache-760f3c9b-044d-4593-bc97-535ac09c3f3b" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.487065] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Instance network_info: |[{"id": "a8b0be21-1051-4e25-a2d0-ae49f7a396ed", "address": "fa:16:3e:de:bf:b7", "network": {"id": "7c53e6ec-f9a3-4a55-a6c2-57a6fc1afd1e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-595111746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f61870de4af4bd98fc7608e2e7a81a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8b0be21-10", "ovs_interfaceid": "a8b0be21-1051-4e25-a2d0-ae49f7a396ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 643.487945] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:bf:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8b0be21-1051-4e25-a2d0-ae49f7a396ed', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.504608] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Creating folder: Project (8f61870de4af4bd98fc7608e2e7a81a8). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 643.504976] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ff1aa6e-4ba4-49f9-b0d1-b64fe47e2c9c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.525229] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Created folder: Project (8f61870de4af4bd98fc7608e2e7a81a8) in parent group-v849485. 
[ 643.525438] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Creating folder: Instances. Parent ref: group-v849504. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 643.525695] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1412e1d3-3f0f-410c-a447-e75e1c3d6c92 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.537798] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Created folder: Instances in parent group-v849504. [ 643.538072] env[62476]: DEBUG oslo.service.loopingcall [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.538600] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 643.538600] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6e27af4-cc46-4b61-a49d-5485e2e5aa3f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.565281] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.565281] env[62476]: value = "task-4319018" [ 643.565281] env[62476]: _type = "Task" [ 643.565281] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.576312] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319018, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.077343] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319018, 'name': CreateVM_Task, 'duration_secs': 0.455691} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.077343] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 644.078191] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.078630] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.078795] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 644.079305] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29959071-5ec4-49b9-ad00-8d254e396681 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.088039] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Waiting for the task: (returnval){ [ 644.088039] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52936060-bf29-af45-789b-220d59d64bdd" [ 644.088039] env[62476]: _type = "Task" [ 644.088039] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.099513] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52936060-bf29-af45-789b-220d59d64bdd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.605789] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.606073] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.606352] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.301483] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "1323e67f-17c6-4432-8eea-98c285745766" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.302200] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "1323e67f-17c6-4432-8eea-98c285745766" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.354788] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 645.466748] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.467046] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.468663] env[62476]: INFO nova.compute.claims [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.740368] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb3d6f0-567b-4640-97a6-ee4025c337ec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.754573] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10f2ce0-2fb1-4f60-9c4b-61ee424445b9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.757938] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "d7ec9b10-5975-4148-9931-3e7b0999b373" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.758186] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.801380] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc3f0f5-1c20-4699-b65a-191fc9fbdd07 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.813643] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4db588-04b8-4210-8028-8604af2a636b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.818355] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 645.835740] env[62476]: DEBUG nova.compute.provider_tree [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.846082] env[62476]: DEBUG nova.scheduler.client.report [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 645.868980] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.402s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.871575] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 645.917069] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.919994] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.922577] env[62476]: INFO nova.compute.claims [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.935922] env[62476]: DEBUG nova.compute.utils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.940138] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 645.940138] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 645.967364] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 646.116624] env[62476]: DEBUG nova.policy [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1630899252d14fb4b06dec356b58a6a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08b579a2cc654b49898bca9f0eccfa57', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 646.120921] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 646.155597] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 646.155868] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 646.156020] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.156224] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 646.156372] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.156519] env[62476]: DEBUG nova.virt.hardware [None 
req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 646.156755] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 646.157358] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 646.157358] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 646.157466] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 646.157717] env[62476]: DEBUG nova.virt.hardware [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 646.158945] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ec4515-ada3-4681-b102-6489ce1f343f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.167714] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3255415e-6d4e-45fa-a0ce-c72e6b220b49 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.251675] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740af8ec-5625-45e9-8458-c2854f5857cc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.260559] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67646a8-ecb7-46d0-a824-e2840de466ba {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.309579] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7df3acf-7063-43aa-b557-56ccb82f575a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.318791] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5fa5813f-395e-4ae1-83dd-bbbd9e87232b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.335430] env[62476]: DEBUG nova.compute.provider_tree [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.353558] env[62476]: DEBUG nova.scheduler.client.report [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.376347] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.457s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.376347] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 646.457631] env[62476]: DEBUG nova.compute.utils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.459788] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 646.460081] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 646.476230] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 646.489791] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "e41d1a8c-ad7e-4151-9745-04318b007dfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.490118] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "e41d1a8c-ad7e-4151-9745-04318b007dfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.531780] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 646.582141] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 646.616061] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 646.617160] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 646.617482] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.617895] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9
tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 646.618723] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.618723] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 646.619104] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 646.619629] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 646.619809] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 646.621176] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 646.621176] env[62476]: DEBUG nova.virt.hardware [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 646.622139] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcc4450-1de3-4591-8424-cc25ca06e3bc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.627641] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.628084] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.629663] env[62476]: INFO nova.compute.claims [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.644139] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4068dc97-3e09-4b31-bd8c-6463e48b1b31 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.670402] env[62476]: DEBUG nova.policy [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9766451a6ad6400e9de77ab77cfbccce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fffb0483f5b24640b59fdf6b6bf5b4b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 646.945076] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c74136-14ed-4bd9-98a2-ce036d3c7060 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.954168] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98cf33a-f44f-4b51-a3dd-55a796c11ade {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.991075] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f71943-f22e-4c22-a391-548df963b308 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.002148] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006302f2-0458-4293-a1c5-713d22ce901b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.017009] env[62476]: DEBUG nova.compute.provider_tree [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.029595] env[62476]: DEBUG nova.scheduler.client.report [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.055627] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.427s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.056145] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 647.126071] env[62476]: DEBUG nova.compute.utils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 647.126660] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 647.126754] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 647.146140] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 647.231465] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Successfully created port: bfdc90a7-dbd5-4543-8bbf-de586f9b69ec {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.275327] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 647.317282] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.317282] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.317598] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.317598] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.317907] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.317907] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.319270] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 647.319486] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.320354] 
env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.320544] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.320750] env[62476]: DEBUG nova.virt.hardware [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.321684] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ea9240-5c23-4f81-8915-09e5657013c8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.337111] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f26133f-0244-4284-a369-292ce046148b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.535585] env[62476]: DEBUG nova.compute.manager [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Received event network-vif-plugged-a8b0be21-1051-4e25-a2d0-ae49f7a396ed {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 647.535838] env[62476]: DEBUG oslo_concurrency.lockutils [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] Acquiring lock "760f3c9b-044d-4593-bc97-535ac09c3f3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.536062] env[62476]: DEBUG oslo_concurrency.lockutils [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.536310] env[62476]: DEBUG oslo_concurrency.lockutils [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.536717] env[62476]: DEBUG nova.compute.manager [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] No waiting events found dispatching network-vif-plugged-a8b0be21-1051-4e25-a2d0-ae49f7a396ed {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 647.536717] env[62476]: WARNING
nova.compute.manager [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Received unexpected event network-vif-plugged-a8b0be21-1051-4e25-a2d0-ae49f7a396ed for instance with vm_state building and task_state spawning. [ 647.536717] env[62476]: DEBUG nova.compute.manager [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Received event network-changed-a8b0be21-1051-4e25-a2d0-ae49f7a396ed {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 647.536899] env[62476]: DEBUG nova.compute.manager [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Refreshing instance network info cache due to event network-changed-a8b0be21-1051-4e25-a2d0-ae49f7a396ed. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 647.537613] env[62476]: DEBUG oslo_concurrency.lockutils [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] Acquiring lock "refresh_cache-760f3c9b-044d-4593-bc97-535ac09c3f3b" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.537613] env[62476]: DEBUG oslo_concurrency.lockutils [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] Acquired lock "refresh_cache-760f3c9b-044d-4593-bc97-535ac09c3f3b" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.537613] env[62476]: DEBUG nova.network.neutron [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Refreshing network info cache for port a8b0be21-1051-4e25-a2d0-ae49f7a396ed {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 647.676307] env[62476]: DEBUG nova.policy [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc7c98c2546b40c7adc0cb9e65dc7115', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bf8433ed9274514b6769d8f07ff9626', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 648.051606] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Successfully created port: 1e70c839-2ff7-4094-9690-89efac2b1221 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.873141] env[62476]: DEBUG nova.network.neutron [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Updated VIF entry in instance network info cache for port a8b0be21-1051-4e25-a2d0-ae49f7a396ed. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 648.873514] env[62476]: DEBUG nova.network.neutron [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Updating instance_info_cache with network_info: [{"id": "a8b0be21-1051-4e25-a2d0-ae49f7a396ed", "address": "fa:16:3e:de:bf:b7", "network": {"id": "7c53e6ec-f9a3-4a55-a6c2-57a6fc1afd1e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-595111746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f61870de4af4bd98fc7608e2e7a81a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8b0be21-10", "ovs_interfaceid": "a8b0be21-1051-4e25-a2d0-ae49f7a396ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.885936] env[62476]: DEBUG oslo_concurrency.lockutils [req-6cc2a645-b9d4-4fc6-a555-172c82018ab8 req-f78a5938-76d5-413e-bdb1-86bafa0ad18f service nova] Releasing lock "refresh_cache-760f3c9b-044d-4593-bc97-535ac09c3f3b" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.087555] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Successfully created port: 7de88137-4c84-46ea-a036-85b084059c94 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.461940] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "a918c107-526d-4cb7-a7dd-735a7d6420a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.462105] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.881542] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.882013] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.279425] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Successfully updated port: bfdc90a7-dbd5-4543-8bbf-de586f9b69ec {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 650.296525] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "refresh_cache-1323e67f-17c6-4432-8eea-98c285745766" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.296676] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquired lock "refresh_cache-1323e67f-17c6-4432-8eea-98c285745766" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.296806] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 650.673460] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Instance cache missing network info. 
[ 651.478684] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Successfully updated port: 7de88137-4c84-46ea-a036-85b084059c94 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.495965] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "refresh_cache-e41d1a8c-ad7e-4151-9745-04318b007dfa" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.496160] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquired lock "refresh_cache-e41d1a8c-ad7e-4151-9745-04318b007dfa" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.496268] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 651.590340] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Instance cache missing network info.
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.933894] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Successfully updated port: 1e70c839-2ff7-4094-9690-89efac2b1221 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.946204] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "refresh_cache-d7ec9b10-5975-4148-9931-3e7b0999b373" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.946707] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired lock "refresh_cache-d7ec9b10-5975-4148-9931-3e7b0999b373" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.947114] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 651.950629] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Updating instance_info_cache with network_info: [{"id": "bfdc90a7-dbd5-4543-8bbf-de586f9b69ec", "address": "fa:16:3e:fe:01:ef", "network": {"id": "c9bd4cb3-6d55-4e63-916a-240aa02e8be7", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1557963340-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08b579a2cc654b49898bca9f0eccfa57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfdc90a7-db", "ovs_interfaceid": "bfdc90a7-dbd5-4543-8bbf-de586f9b69ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.978206] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Releasing lock "refresh_cache-1323e67f-17c6-4432-8eea-98c285745766" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.978584] 
env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Instance network_info: |[{"id": "bfdc90a7-dbd5-4543-8bbf-de586f9b69ec", "address": "fa:16:3e:fe:01:ef", "network": {"id": "c9bd4cb3-6d55-4e63-916a-240aa02e8be7", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1557963340-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08b579a2cc654b49898bca9f0eccfa57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfdc90a7-db", "ovs_interfaceid": "bfdc90a7-dbd5-4543-8bbf-de586f9b69ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 651.979886] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:01:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '97113f46-d648-4613-b233-069acba18198', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfdc90a7-dbd5-4543-8bbf-de586f9b69ec', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 651.989017] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Creating folder: Project (08b579a2cc654b49898bca9f0eccfa57). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 651.990020] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-767a880f-4f58-4ac2-9491-0d7e76b3d64c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.004900] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Created folder: Project (08b579a2cc654b49898bca9f0eccfa57) in parent group-v849485. [ 652.005182] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Creating folder: Instances. Parent ref: group-v849507. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.005511] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b31e895e-c51c-4fae-9ca9-adae194d45a4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.016992] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Created folder: Instances in parent group-v849507. [ 652.017297] env[62476]: DEBUG oslo.service.loopingcall [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.017500] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 652.017904] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1727f59-5fb4-4dfe-b0f9-5fb0fe086262 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.037860] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Updating instance_info_cache with network_info: [{"id": "7de88137-4c84-46ea-a036-85b084059c94", "address": "fa:16:3e:23:a1:b8", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de88137-4c", "ovs_interfaceid": "7de88137-4c84-46ea-a036-85b084059c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.041798] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.041798] env[62476]: value = "task-4319021" [ 652.041798] env[62476]: _type = "Task" [ 652.041798] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.055314] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319021, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.062345] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Releasing lock "refresh_cache-e41d1a8c-ad7e-4151-9745-04318b007dfa" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.062645] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Instance network_info: |[{"id": "7de88137-4c84-46ea-a036-85b084059c94", "address": "fa:16:3e:23:a1:b8", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de88137-4c", "ovs_interfaceid": "7de88137-4c84-46ea-a036-85b084059c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 652.063130] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:a1:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7de88137-4c84-46ea-a036-85b084059c94', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.072558] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Creating folder: Project (6bf8433ed9274514b6769d8f07ff9626). Parent ref: group-v849485. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.073333] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce089b4e-a1c9-4a5d-a2f8-b4f890a22f10 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.085991] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Created folder: Project (6bf8433ed9274514b6769d8f07ff9626) in parent group-v849485. [ 652.086237] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Creating folder: Instances. Parent ref: group-v849510. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.086513] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-719489a2-c1aa-425e-95dc-a8fffbbf1841 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.098184] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Created folder: Instances in parent group-v849510. [ 652.098473] env[62476]: DEBUG oslo.service.loopingcall [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.102024] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 652.102024] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d07d744-2bdd-413d-be4a-0538e88225bf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.116963] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.127630] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.127630] env[62476]: value = "task-4319024" [ 652.127630] env[62476]: _type = "Task" [ 652.127630] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.140247] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319024, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.556802] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319021, 'name': CreateVM_Task, 'duration_secs': 0.471159} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.557084] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 652.557757] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.557918] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.558289] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 652.558512] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-203246e1-a355-41de-a172-2c570edabad1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.568578] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Waiting for the task: (returnval){ [ 652.568578] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52f918ab-16e6-1656-4a2a-8400012d7860" [ 652.568578] env[62476]: _type = "Task" [ 652.568578] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.583597] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52f918ab-16e6-1656-4a2a-8400012d7860, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.640571] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319024, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.855275] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Updating instance_info_cache with network_info: [{"id": "1e70c839-2ff7-4094-9690-89efac2b1221", "address": "fa:16:3e:0d:be:5f", "network": {"id": "e4c40932-e3ae-4a9e-8be0-03603b3183a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1410291433-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fffb0483f5b24640b59fdf6b6bf5b4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e70c839-2f", "ovs_interfaceid": "1e70c839-2ff7-4094-9690-89efac2b1221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.867511] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.867790] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.872409] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Releasing lock "refresh_cache-d7ec9b10-5975-4148-9931-3e7b0999b373" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.872705] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Instance network_info: |[{"id": "1e70c839-2ff7-4094-9690-89efac2b1221", "address": "fa:16:3e:0d:be:5f", "network": {"id": "e4c40932-e3ae-4a9e-8be0-03603b3183a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1410291433-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1",
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fffb0483f5b24640b59fdf6b6bf5b4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e70c839-2f", "ovs_interfaceid": "1e70c839-2ff7-4094-9690-89efac2b1221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 652.874167] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:be:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e70c839-2ff7-4094-9690-89efac2b1221', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.881918] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating folder: Project (fffb0483f5b24640b59fdf6b6bf5b4b7). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.884893] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47468b75-e9a1-42c6-9396-347122c6be98 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.896675] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Created folder: Project (fffb0483f5b24640b59fdf6b6bf5b4b7) in parent group-v849485. [ 652.897027] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating folder: Instances. Parent ref: group-v849513. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.897127] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9f67cac-3919-46c8-9386-28712d873ff5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.912042] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Created folder: Instances in parent group-v849513. 
[ 652.912339] env[62476]: DEBUG oslo.service.loopingcall [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.912534] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 652.912739] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3770dae-d7d4-4592-bd17-fd0e8edbbcec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.936044] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.936044] env[62476]: value = "task-4319027" [ 652.936044] env[62476]: _type = "Task" [ 652.936044] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.946967] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319027, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.082594] env[62476]: DEBUG nova.compute.manager [req-903223da-683a-4124-8f38-1dcb474856b3 req-7b93077b-739e-4ec2-ab00-9992904e9744 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Received event network-vif-plugged-bfdc90a7-dbd5-4543-8bbf-de586f9b69ec {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 653.082594] env[62476]: DEBUG oslo_concurrency.lockutils [req-903223da-683a-4124-8f38-1dcb474856b3 req-7b93077b-739e-4ec2-ab00-9992904e9744 service nova] Acquiring lock "1323e67f-17c6-4432-8eea-98c285745766-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.082594] env[62476]: DEBUG oslo_concurrency.lockutils [req-903223da-683a-4124-8f38-1dcb474856b3 req-7b93077b-739e-4ec2-ab00-9992904e9744 service nova] Lock "1323e67f-17c6-4432-8eea-98c285745766-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.082594] env[62476]: DEBUG oslo_concurrency.lockutils [req-903223da-683a-4124-8f38-1dcb474856b3 req-7b93077b-739e-4ec2-ab00-9992904e9744 service nova] Lock "1323e67f-17c6-4432-8eea-98c285745766-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.082737] env[62476]: DEBUG nova.compute.manager [req-903223da-683a-4124-8f38-1dcb474856b3 req-7b93077b-739e-4ec2-ab00-9992904e9744 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] No waiting events found dispatching network-vif-plugged-bfdc90a7-dbd5-4543-8bbf-de586f9b69ec {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 653.082737] env[62476]: WARNING nova.compute.manager [req-903223da-683a-4124-8f38-1dcb474856b3 req-7b93077b-739e-4ec2-ab00-9992904e9744 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Received unexpected 
event network-vif-plugged-bfdc90a7-dbd5-4543-8bbf-de586f9b69ec for instance with vm_state building and task_state spawning. [ 653.090120] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.090911] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.090911] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.143690] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319024, 'name': CreateVM_Task, 'duration_secs': 0.533268} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.144273] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 653.145245] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.145474] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.146110] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.146365] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3185ad2a-b3c2-4bf6-8139-5eb0aec64c2c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.155346] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Waiting 
for the task: (returnval){ [ 653.155346] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52d2aadb-7a04-ebbd-ae9a-e9e077a49b32" [ 653.155346] env[62476]: _type = "Task" [ 653.155346] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.170131] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52d2aadb-7a04-ebbd-ae9a-e9e077a49b32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.455035] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319027, 'name': CreateVM_Task, 'duration_secs': 0.446233} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.455359] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 653.456116] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.674169] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.674169] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.674169] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.674169] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.674512] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired external semaphore "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.674630] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0eb9ae4-dcff-4f9f-8b35-6cbd2d9dd80a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.682803] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){ [ 653.682803] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52adc9e3-f339-d57b-0147-bad107ecf8dd" [ 653.682803] env[62476]: _type = "Task" [ 653.682803] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.696053] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52adc9e3-f339-d57b-0147-bad107ecf8dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.196715] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.197138] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.197533] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.558338] env[62476]: DEBUG nova.compute.manager [req-fc774e4b-a666-4156-be57-f7826b779fdb req-9a70b7df-4cd9-4530-bc85-115ae49df7ae service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Received event network-vif-plugged-7de88137-4c84-46ea-a036-85b084059c94 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 654.559048] env[62476]: DEBUG oslo_concurrency.lockutils [req-fc774e4b-a666-4156-be57-f7826b779fdb req-9a70b7df-4cd9-4530-bc85-115ae49df7ae service nova] Acquiring lock "e41d1a8c-ad7e-4151-9745-04318b007dfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.559048] env[62476]: DEBUG oslo_concurrency.lockutils [req-fc774e4b-a666-4156-be57-f7826b779fdb req-9a70b7df-4cd9-4530-bc85-115ae49df7ae service nova] Lock 
"e41d1a8c-ad7e-4151-9745-04318b007dfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.559432] env[62476]: DEBUG oslo_concurrency.lockutils [req-fc774e4b-a666-4156-be57-f7826b779fdb req-9a70b7df-4cd9-4530-bc85-115ae49df7ae service nova] Lock "e41d1a8c-ad7e-4151-9745-04318b007dfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.559735] env[62476]: DEBUG nova.compute.manager [req-fc774e4b-a666-4156-be57-f7826b779fdb req-9a70b7df-4cd9-4530-bc85-115ae49df7ae service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] No waiting events found dispatching network-vif-plugged-7de88137-4c84-46ea-a036-85b084059c94 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 654.559967] env[62476]: WARNING nova.compute.manager [req-fc774e4b-a666-4156-be57-f7826b779fdb req-9a70b7df-4cd9-4530-bc85-115ae49df7ae service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Received unexpected event network-vif-plugged-7de88137-4c84-46ea-a036-85b084059c94 for instance with vm_state building and task_state spawning. [ 655.094553] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.094839] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.018805] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d0fa9ce0-5864-4e3c-af72-8e3c8bc89cb0 tempest-ServersTestFqdnHostnames-1462908703 tempest-ServersTestFqdnHostnames-1462908703-project-member] Acquiring lock "a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.019030] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d0fa9ce0-5864-4e3c-af72-8e3c8bc89cb0 tempest-ServersTestFqdnHostnames-1462908703 tempest-ServersTestFqdnHostnames-1462908703-project-member] Lock "a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.838427] env[62476]: DEBUG nova.compute.manager [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Received event 
network-changed-bfdc90a7-dbd5-4543-8bbf-de586f9b69ec {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 656.838427] env[62476]: DEBUG nova.compute.manager [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Refreshing instance network info cache due to event network-changed-bfdc90a7-dbd5-4543-8bbf-de586f9b69ec. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 656.838427] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Acquiring lock "refresh_cache-1323e67f-17c6-4432-8eea-98c285745766" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.838427] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Acquired lock "refresh_cache-1323e67f-17c6-4432-8eea-98c285745766" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.838427] env[62476]: DEBUG nova.network.neutron [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Refreshing network info cache for port bfdc90a7-dbd5-4543-8bbf-de586f9b69ec {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 657.850240] env[62476]: DEBUG nova.network.neutron [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Updated VIF entry in instance network info cache for port bfdc90a7-dbd5-4543-8bbf-de586f9b69ec. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 657.850645] env[62476]: DEBUG nova.network.neutron [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Updating instance_info_cache with network_info: [{"id": "bfdc90a7-dbd5-4543-8bbf-de586f9b69ec", "address": "fa:16:3e:fe:01:ef", "network": {"id": "c9bd4cb3-6d55-4e63-916a-240aa02e8be7", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1557963340-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08b579a2cc654b49898bca9f0eccfa57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97113f46-d648-4613-b233-069acba18198", "external-id": "nsx-vlan-transportzone-480", "segmentation_id": 480, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfdc90a7-db", "ovs_interfaceid": "bfdc90a7-dbd5-4543-8bbf-de586f9b69ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.871475] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Releasing lock "refresh_cache-1323e67f-17c6-4432-8eea-98c285745766" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.871666] env[62476]: DEBUG nova.compute.manager [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Received event network-vif-plugged-1e70c839-2ff7-4094-9690-89efac2b1221 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 657.874489] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Acquiring lock "d7ec9b10-5975-4148-9931-3e7b0999b373-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.874489] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.874489] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.874489] env[62476]: DEBUG 
nova.compute.manager [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] No waiting events found dispatching network-vif-plugged-1e70c839-2ff7-4094-9690-89efac2b1221 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 657.874906] env[62476]: WARNING nova.compute.manager [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Received unexpected event network-vif-plugged-1e70c839-2ff7-4094-9690-89efac2b1221 for instance with vm_state building and task_state spawning. [ 657.874906] env[62476]: DEBUG nova.compute.manager [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Received event network-changed-1e70c839-2ff7-4094-9690-89efac2b1221 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 657.874906] env[62476]: DEBUG nova.compute.manager [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Refreshing instance network info cache due to event network-changed-1e70c839-2ff7-4094-9690-89efac2b1221. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 657.874906] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Acquiring lock "refresh_cache-d7ec9b10-5975-4148-9931-3e7b0999b373" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.874906] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Acquired lock "refresh_cache-d7ec9b10-5975-4148-9931-3e7b0999b373" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.875072] env[62476]: DEBUG nova.network.neutron [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Refreshing network info cache for port 1e70c839-2ff7-4094-9690-89efac2b1221 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 658.507368] env[62476]: DEBUG oslo_concurrency.lockutils [None req-78244f68-4174-4661-bf27-e91b3e28ad11 tempest-FloatingIPsAssociationTestJSON-748075487 tempest-FloatingIPsAssociationTestJSON-748075487-project-member] Acquiring lock "d271fa1d-d7f3-4abf-9b5f-69396c4c128c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.507633] env[62476]: DEBUG oslo_concurrency.lockutils [None req-78244f68-4174-4661-bf27-e91b3e28ad11 tempest-FloatingIPsAssociationTestJSON-748075487 tempest-FloatingIPsAssociationTestJSON-748075487-project-member] Lock "d271fa1d-d7f3-4abf-9b5f-69396c4c128c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.647888] env[62476]: DEBUG nova.network.neutron [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 
service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Updated VIF entry in instance network info cache for port 1e70c839-2ff7-4094-9690-89efac2b1221. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 658.647888] env[62476]: DEBUG nova.network.neutron [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Updating instance_info_cache with network_info: [{"id": "1e70c839-2ff7-4094-9690-89efac2b1221", "address": "fa:16:3e:0d:be:5f", "network": {"id": "e4c40932-e3ae-4a9e-8be0-03603b3183a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1410291433-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fffb0483f5b24640b59fdf6b6bf5b4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e70c839-2f", "ovs_interfaceid": "1e70c839-2ff7-4094-9690-89efac2b1221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.658612] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c87c064-b4cc-4315-a832-bd6411094038 req-9dc9b64f-1c39-49ca-8440-fb02f0304ce1 service nova] Releasing lock "refresh_cache-d7ec9b10-5975-4148-9931-3e7b0999b373" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.218166] env[62476]: DEBUG nova.compute.manager [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Received event network-changed-7de88137-4c84-46ea-a036-85b084059c94 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 659.218405] env[62476]: DEBUG nova.compute.manager [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Refreshing instance network info cache due to event network-changed-7de88137-4c84-46ea-a036-85b084059c94. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 659.218576] env[62476]: DEBUG oslo_concurrency.lockutils [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] Acquiring lock "refresh_cache-e41d1a8c-ad7e-4151-9745-04318b007dfa" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.218627] env[62476]: DEBUG oslo_concurrency.lockutils [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] Acquired lock "refresh_cache-e41d1a8c-ad7e-4151-9745-04318b007dfa" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.218789] env[62476]: DEBUG nova.network.neutron [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Refreshing network info cache for port 7de88137-4c84-46ea-a036-85b084059c94 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 659.705551] env[62476]: DEBUG nova.network.neutron [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Updated VIF entry in instance network info cache for port 7de88137-4c84-46ea-a036-85b084059c94. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 659.705880] env[62476]: DEBUG nova.network.neutron [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Updating instance_info_cache with network_info: [{"id": "7de88137-4c84-46ea-a036-85b084059c94", "address": "fa:16:3e:23:a1:b8", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de88137-4c", "ovs_interfaceid": "7de88137-4c84-46ea-a036-85b084059c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.719675] env[62476]: DEBUG oslo_concurrency.lockutils [req-46bd833a-5f65-4164-b699-a65b367bf0ca req-d6c36a5d-341a-46fb-9084-9b3365aeaf95 service nova] Releasing lock "refresh_cache-e41d1a8c-ad7e-4151-9745-04318b007dfa" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.091727] env[62476]: DEBUG oslo_concurrency.lockutils [None req-bc6e10b8-37c6-411b-90ff-f54796f5d48b tempest-ServerDiagnosticsTest-736104627 tempest-ServerDiagnosticsTest-736104627-project-member] Acquiring lock "a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.092146] env[62476]: DEBUG oslo_concurrency.lockutils [None req-bc6e10b8-37c6-411b-90ff-f54796f5d48b tempest-ServerDiagnosticsTest-736104627 tempest-ServerDiagnosticsTest-736104627-project-member] Lock "a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.557293] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4faee79d-d499-4240-8183-c3c0b58a49dc tempest-ServerActionsTestJSON-1910103505 tempest-ServerActionsTestJSON-1910103505-project-member] Acquiring lock "da5c9742-9dba-4691-9bda-25858915857b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.557838] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4faee79d-d499-4240-8183-c3c0b58a49dc tempest-ServerActionsTestJSON-1910103505 tempest-ServerActionsTestJSON-1910103505-project-member] Lock "da5c9742-9dba-4691-9bda-25858915857b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.214295] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2c6ec62f-f8c2-427a-b0fd-53ec2fe9e897 tempest-ServersWithSpecificFlavorTestJSON-1581171592 tempest-ServersWithSpecificFlavorTestJSON-1581171592-project-member] Acquiring lock "3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.214611] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2c6ec62f-f8c2-427a-b0fd-53ec2fe9e897 tempest-ServersWithSpecificFlavorTestJSON-1581171592 tempest-ServersWithSpecificFlavorTestJSON-1581171592-project-member] Lock "3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.241085] env[62476]: DEBUG oslo_concurrency.lockutils [None req-69cc9f62-bbf1-4f34-856f-a41bdb4f078e tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] Acquiring lock "ab7da5f4-9460-4d70-a0e5-5a690284d0e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.241819] env[62476]: DEBUG oslo_concurrency.lockutils [None req-69cc9f62-bbf1-4f34-856f-a41bdb4f078e tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] Lock "ab7da5f4-9460-4d70-a0e5-5a690284d0e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
670.688409] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "380bc9c3-8bba-4f26-b938-e4e74543261c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.688708] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "380bc9c3-8bba-4f26-b938-e4e74543261c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.714908] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "971698f2-c127-4f21-ae3f-3bb863742982" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.715106] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "971698f2-c127-4f21-ae3f-3bb863742982" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.449501] env[62476]: WARNING oslo_vmware.rw_handles [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 672.449501] env[62476]: ERROR oslo_vmware.rw_handles [ 672.450193] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a 
tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 672.451139] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 672.451767] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Copying Virtual Disk [datastore1] vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/818c9aa7-ee9b-483b-b852-70afaa96745f/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 672.452096] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a27c872-1d54-4cd4-b073-75dbc9c3cb71 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.461955] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Waiting for the task: (returnval){ [ 672.461955] env[62476]: value = "task-4319028" [ 672.461955] env[62476]: _type = "Task" [ 672.461955] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.470226] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Task: {'id': task-4319028, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.974312] env[62476]: DEBUG oslo_vmware.exceptions [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 672.974651] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.978585] env[62476]: ERROR nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 672.978585] env[62476]: Faults: ['InvalidArgument'] [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Traceback (most recent call last): [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] yield resources [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self.driver.spawn(context, instance, image_meta, [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self._fetch_image_if_missing(context, vi) [ 672.978585] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] image_cache(vi, tmp_image_ds_loc) [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] vm_util.copy_virtual_disk( [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] session._wait_for_task(vmdk_copy_task) [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] return self.wait_for_task(task_ref) [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] return evt.wait() [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] result = hub.switch() [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 672.980179] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] return self.greenlet.switch() [ 672.980554] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 672.980554] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self.f(*self.args, **self.kw) [ 672.980554] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 672.980554] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] raise exceptions.translate_fault(task_info.error) [ 672.980554] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 672.980554] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Faults: ['InvalidArgument'] [ 672.980554] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] [ 672.980554] env[62476]: INFO nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Terminating instance [ 672.980554] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.980816] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 672.982722] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a 
tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 672.982722] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 672.982722] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2dbe6490-0621-4104-a1bf-aaa230ae6f19 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.989038] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafb4ec6-e508-4213-8a3f-d83ba240fb59 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.002022] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 673.002022] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb6bc842-ba7c-4a4a-b5d0-4fa89a544ddc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.004117] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.004117] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 673.005095] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3975d9b-9563-4f05-b61b-48d13320cb2f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.011753] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for the task: (returnval){ [ 673.011753] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]524eb6f2-8de3-1334-3cbb-4f749db5b065" [ 673.011753] env[62476]: _type = "Task" [ 673.011753] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.020795] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]524eb6f2-8de3-1334-3cbb-4f749db5b065, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.085571] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 673.086549] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 673.086549] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Deleting the datastore file [datastore1] 5f15094d-b066-4025-af5d-4ed35af2dfee {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.087038] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14a2e912-ac26-41f2-b3cf-1ae9278304ad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.095922] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Waiting for the task: (returnval){ [ 673.095922] env[62476]: value = "task-4319030" [ 673.095922] env[62476]: _type = "Task" [ 673.095922] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.104873] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Task: {'id': task-4319030, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.525880] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 673.527780] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Creating directory with path [datastore1] vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.528128] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a75ddbc-538c-4421-9bc1-212fdc605d7f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.546469] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Created directory with path [datastore1] vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.546469] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Fetch image to [datastore1] vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 673.546469] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 673.547712] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676f992b-befc-41cf-bd43-c189ec19c67d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.558132] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b406663a-ce74-41c3-a3be-bfc8f3e2e559 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.570159] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d0508d-710f-416f-b283-613268051d2f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.608847] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6513fd7a-c5db-46b2-9148-dba6e5e36c8e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.618891] env[62476]: DEBUG oslo_vmware.api [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Task: {'id': task-4319030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095906} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.619494] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.620576] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 673.620576] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 673.620576] env[62476]: INFO nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Took 0.64 seconds to destroy the instance on the hypervisor. 
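
The destroy sequence above follows oslo.vmware's task-polling pattern: each vSphere *_Task call (here FileManager.DeleteDatastoreFile_Task, task-4319030) returns a task reference immediately, and wait_for_task polls it via _poll_task until vCenter reports success or a fault. A minimal sketch of that pattern, assuming an established VMwareAPISession; the endpoint, credentials, and datastore path below are illustrative placeholders, not values from this log:

    from oslo_vmware import api

    # Placeholder endpoint/credentials; constructing the session logs in,
    # as seen at the start of this log ("Successfully established new
    # session").
    session = api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # DeleteDatastoreFile_Task returns a task moref at once; the deletion
    # itself runs asynchronously on the vCenter side.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] <instance-uuid>',  # datastore path to delete
        datacenter=None)  # a datacenter moref would normally be passed

    # Blocks, polling progress (the "progress is 0%" entries above) and
    # raising a translated VimFaultException on error, as in the traceback
    # further below.
    session.wait_for_task(task)
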
[ 673.622247] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0a5e1174-c5b0-4279-878c-37d308e9c422 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.626297] env[62476]: DEBUG nova.compute.claims [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 673.626297] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.626297] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.661079] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 673.779846] env[62476]: DEBUG oslo_vmware.rw_handles [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 673.874985] env[62476]: DEBUG oslo_vmware.rw_handles [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 673.876518] env[62476]: DEBUG oslo_vmware.rw_handles [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 674.257791] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928964d7-d3e5-40de-af92-2e5f096c6cbe {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.266467] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010107b1-d337-4cc7-ac49-12685c8fc567 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.299481] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c54d869-a214-42fb-ad35-f8ac90ab9ca2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.308015] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f33ee1-7ed2-45bf-b041-752b6a93f0f8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.324185] env[62476]: DEBUG nova.compute.provider_tree [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.334741] env[62476]: DEBUG nova.scheduler.client.report [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.354329] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.728s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.354932] env[62476]: ERROR nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 674.354932] env[62476]: Faults: ['InvalidArgument'] [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Traceback (most recent call last): [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] 
File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self.driver.spawn(context, instance, image_meta, [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self._fetch_image_if_missing(context, vi) [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] image_cache(vi, tmp_image_ds_loc) [ 674.354932] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] vm_util.copy_virtual_disk( [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] session._wait_for_task(vmdk_copy_task) [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] return self.wait_for_task(task_ref) [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] return evt.wait() [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] result = hub.switch() [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] return self.greenlet.switch() [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 674.355548] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] self.f(*self.args, **self.kw) [ 674.357200] env[62476]: ERROR 
nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 674.357200] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] raise exceptions.translate_fault(task_info.error) [ 674.357200] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 674.357200] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Faults: ['InvalidArgument'] [ 674.357200] env[62476]: ERROR nova.compute.manager [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] [ 674.357200] env[62476]: DEBUG nova.compute.utils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 674.360647] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Build of instance 5f15094d-b066-4025-af5d-4ed35af2dfee was re-scheduled: A specified parameter was not correct: fileType [ 674.360647] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 674.362017] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 674.362017] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 674.362017] env[62476]: DEBUG nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 674.362017] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 675.415383] env[62476]: DEBUG nova.network.neutron [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.439909] env[62476]: INFO nova.compute.manager [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] [instance: 5f15094d-b066-4025-af5d-4ed35af2dfee] Took 1.08 seconds to deallocate network for instance. [ 675.598185] env[62476]: INFO nova.scheduler.client.report [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Deleted allocations for instance 5f15094d-b066-4025-af5d-4ed35af2dfee [ 675.865199] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b9b60cef-5ffd-4dca-9475-748c564e7f3a tempest-FloatingIPsAssociationNegativeTestJSON-1287748814 tempest-FloatingIPsAssociationNegativeTestJSON-1287748814-project-member] Lock "5f15094d-b066-4025-af5d-4ed35af2dfee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.582s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.865199] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 675.865199] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.865199] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.873532] env[62476]: INFO nova.compute.claims [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.371351] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b1ff88-f108-4ea9-9ff6-51a6887a3446 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.380704] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0631583-db78-4185-8b6e-2b2956c18f31 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.416789] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db644c72-3d83-4804-99ef-ce6c832a1bde {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.426629] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f040ded9-f918-48ab-acc6-a429913fdd67 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.444954] env[62476]: DEBUG nova.compute.provider_tree [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.457147] env[62476]: DEBUG nova.scheduler.client.report [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.478380] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.671s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.479097] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 676.533423] env[62476]: DEBUG nova.compute.utils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.533423] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 676.533642] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 676.545452] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 676.637930] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 676.668351] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 676.668597] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 676.668793] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.669074] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 676.669240] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.669399] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 676.669624] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 676.669840] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 676.670566] env[62476]: DEBUG nova.virt.hardware [None 
req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 676.670688] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 676.670923] env[62476]: DEBUG nova.virt.hardware [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.672080] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af553ed9-c28f-42a9-abe1-628292c233db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.683331] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9807e7-163f-4de9-83a6-6d4ab956fc74 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.736479] env[62476]: DEBUG nova.policy [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9648eddc7b4f4faca088386c6fef463f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d2e4aa6fb2247a9adde574f155bb4d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 677.381218] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "0561164b-f3f9-446f-b597-4b6d16a32a00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.381766] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.829063] env[62476]: DEBUG oslo_concurrency.lockutils [None req-642fde63-9cbf-42b1-b949-64d527549465 tempest-AttachInterfacesUnderV243Test-1728856832 tempest-AttachInterfacesUnderV243Test-1728856832-project-member] Acquiring lock "327a282e-b502-4644-a152-0e77ec399fe7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.829950] env[62476]: DEBUG oslo_concurrency.lockutils [None req-642fde63-9cbf-42b1-b949-64d527549465 tempest-AttachInterfacesUnderV243Test-1728856832 tempest-AttachInterfacesUnderV243Test-1728856832-project-member] Lock "327a282e-b502-4644-a152-0e77ec399fe7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.353229] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Successfully created port: 8fb51952-70fd-40ff-85e2-121efe9acaf8 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.270654] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Successfully updated port: 8fb51952-70fd-40ff-85e2-121efe9acaf8 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.294694] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "refresh_cache-a918c107-526d-4cb7-a7dd-735a7d6420a4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.295548] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired lock "refresh_cache-a918c107-526d-4cb7-a7dd-735a7d6420a4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.296026] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 680.363925] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.996015] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Updating instance_info_cache with network_info: [{"id": "8fb51952-70fd-40ff-85e2-121efe9acaf8", "address": "fa:16:3e:a0:8f:91", "network": {"id": "c3805f6e-10c5-494e-ba39-480d3c8914dd", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-489017107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2e4aa6fb2247a9adde574f155bb4d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb51952-70", "ovs_interfaceid": "8fb51952-70fd-40ff-85e2-121efe9acaf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.018417] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Releasing lock "refresh_cache-a918c107-526d-4cb7-a7dd-735a7d6420a4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.018417] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Instance network_info: |[{"id": "8fb51952-70fd-40ff-85e2-121efe9acaf8", "address": "fa:16:3e:a0:8f:91", "network": {"id": "c3805f6e-10c5-494e-ba39-480d3c8914dd", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-489017107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2e4aa6fb2247a9adde574f155bb4d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb51952-70", "ovs_interfaceid": "8fb51952-70fd-40ff-85e2-121efe9acaf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 681.018735] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:8f:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fb51952-70fd-40ff-85e2-121efe9acaf8', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 681.035530] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating folder: Project (2d2e4aa6fb2247a9adde574f155bb4d2). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 681.036513] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9766a5f7-4cfa-4564-b2e9-bb7e0748a7ea {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.052035] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Created folder: Project (2d2e4aa6fb2247a9adde574f155bb4d2) in parent group-v849485. [ 681.052035] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating folder: Instances. Parent ref: group-v849516. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 681.053934] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14ca74d5-d283-4568-b261-12d5502507c1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.067143] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Created folder: Instances in parent group-v849516. [ 681.067633] env[62476]: DEBUG oslo.service.loopingcall [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 681.068410] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 681.068822] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7c796b9-f21d-4e15-a1ff-89ccd103443e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.104016] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 681.104016] env[62476]: value = "task-4319033" [ 681.104016] env[62476]: _type = "Task" [ 681.104016] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.113125] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319033, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.508880] env[62476]: DEBUG nova.compute.manager [req-8dfdbff7-231d-4140-82f4-4c7b4e8349fe req-3630c919-aa51-4f67-97d7-2f3ff7f3ef6e service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Received event network-vif-plugged-8fb51952-70fd-40ff-85e2-121efe9acaf8 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 681.509220] env[62476]: DEBUG oslo_concurrency.lockutils [req-8dfdbff7-231d-4140-82f4-4c7b4e8349fe req-3630c919-aa51-4f67-97d7-2f3ff7f3ef6e service nova] Acquiring lock "a918c107-526d-4cb7-a7dd-735a7d6420a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.509341] env[62476]: DEBUG oslo_concurrency.lockutils [req-8dfdbff7-231d-4140-82f4-4c7b4e8349fe req-3630c919-aa51-4f67-97d7-2f3ff7f3ef6e service nova] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.509511] env[62476]: DEBUG oslo_concurrency.lockutils [req-8dfdbff7-231d-4140-82f4-4c7b4e8349fe req-3630c919-aa51-4f67-97d7-2f3ff7f3ef6e service nova] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.509688] env[62476]: DEBUG nova.compute.manager [req-8dfdbff7-231d-4140-82f4-4c7b4e8349fe req-3630c919-aa51-4f67-97d7-2f3ff7f3ef6e service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] No waiting events found dispatching network-vif-plugged-8fb51952-70fd-40ff-85e2-121efe9acaf8 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 681.509854] env[62476]: WARNING nova.compute.manager [req-8dfdbff7-231d-4140-82f4-4c7b4e8349fe req-3630c919-aa51-4f67-97d7-2f3ff7f3ef6e service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Received unexpected event network-vif-plugged-8fb51952-70fd-40ff-85e2-121efe9acaf8 for instance with vm_state building and task_state spawning. [ 681.619808] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319033, 'name': CreateVM_Task, 'duration_secs': 0.376325} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.619974] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 681.621663] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.621663] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.621834] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 681.622127] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-499f89d9-9ec5-466e-95f3-da5f1fe679f6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.629359] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){ [ 681.629359] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]527da508-5d33-0ca8-9c90-94882d69deb9" [ 681.629359] env[62476]: _type = "Task" [ 681.629359] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.643530] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]527da508-5d33-0ca8-9c90-94882d69deb9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.140015] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.140387] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 682.140641] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.106456] env[62476]: DEBUG nova.compute.manager [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Received event network-changed-8fb51952-70fd-40ff-85e2-121efe9acaf8 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 685.106742] env[62476]: DEBUG nova.compute.manager [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Refreshing instance network info cache due to event network-changed-8fb51952-70fd-40ff-85e2-121efe9acaf8. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 685.107407] env[62476]: DEBUG oslo_concurrency.lockutils [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] Acquiring lock "refresh_cache-a918c107-526d-4cb7-a7dd-735a7d6420a4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.107649] env[62476]: DEBUG oslo_concurrency.lockutils [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] Acquired lock "refresh_cache-a918c107-526d-4cb7-a7dd-735a7d6420a4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.107734] env[62476]: DEBUG nova.network.neutron [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Refreshing network info cache for port 8fb51952-70fd-40ff-85e2-121efe9acaf8 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 685.139373] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5a45573f-1ac1-46cf-8348-0ccec7ad0efa tempest-ServerRescueTestJSON-824594041 tempest-ServerRescueTestJSON-824594041-project-member] Acquiring lock "4770186f-8bd4-455c-a21d-f79e2230fa4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.141624] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5a45573f-1ac1-46cf-8348-0ccec7ad0efa tempest-ServerRescueTestJSON-824594041 tempest-ServerRescueTestJSON-824594041-project-member] Lock "4770186f-8bd4-455c-a21d-f79e2230fa4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.661628] env[62476]: DEBUG nova.network.neutron [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Updated VIF entry in instance network info cache for port 8fb51952-70fd-40ff-85e2-121efe9acaf8. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 685.661754] env[62476]: DEBUG nova.network.neutron [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Updating instance_info_cache with network_info: [{"id": "8fb51952-70fd-40ff-85e2-121efe9acaf8", "address": "fa:16:3e:a0:8f:91", "network": {"id": "c3805f6e-10c5-494e-ba39-480d3c8914dd", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-489017107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2e4aa6fb2247a9adde574f155bb4d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb51952-70", "ovs_interfaceid": "8fb51952-70fd-40ff-85e2-121efe9acaf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.680211] env[62476]: DEBUG oslo_concurrency.lockutils [req-6048f194-d04d-4ec2-8498-f2d4988c4df3 req-c7332ebe-26e0-4caf-80c4-9fca194e220f service nova] Releasing lock "refresh_cache-a918c107-526d-4cb7-a7dd-735a7d6420a4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.346241] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2c2db410-b875-4528-a636-0e8704c6a1bf tempest-ImagesOneServerNegativeTestJSON-1106775302 tempest-ImagesOneServerNegativeTestJSON-1106775302-project-member] Acquiring lock "c60488a7-7d3b-49af-8b4d-9aad718a37a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.346241] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2c2db410-b875-4528-a636-0e8704c6a1bf tempest-ImagesOneServerNegativeTestJSON-1106775302 tempest-ImagesOneServerNegativeTestJSON-1106775302-project-member] Lock "c60488a7-7d3b-49af-8b4d-9aad718a37a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.571564] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.571685] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.607303] env[62476]: 
DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.608078] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.608304] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 687.027636] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.028224] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.042815] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.042971] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.043076] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.043215] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 687.045076] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31620132-b240-4e49-a7f1-a6af24636afd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.062067] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad590e9a-f2f8-40a9-a29e-a2dc164c1bdf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.075836] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862afd16-a1c4-4fc6-8930-2c2ea6883983 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.089529] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43b2e00-42ec-47b4-aa32-abc1d44daf53 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.126082] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180630MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 687.126243] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.126467] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.222715] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ae5723f6-0107-46e8-971d-fca307ce67c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223115] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance adf2f380-84ad-480b-aa9a-16b19c05a3f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223115] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 187242f5-934b-4c1d-b8ac-2ce8c347351a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223270] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223308] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223521] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 760f3c9b-044d-4593-bc97-535ac09c3f3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223576] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1323e67f-17c6-4432-8eea-98c285745766 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223658] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.223773] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e41d1a8c-ad7e-4151-9745-04318b007dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.224326] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.260940] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.287733] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.307069] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.319326] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.335640] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d271fa1d-d7f3-4abf-9b5f-69396c4c128c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.346610] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.361656] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance da5c9742-9dba-4691-9bda-25858915857b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.380986] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.395029] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ab7da5f4-9460-4d70-a0e5-5a690284d0e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.409634] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 380bc9c3-8bba-4f26-b938-e4e74543261c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.438951] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 971698f2-c127-4f21-ae3f-3bb863742982 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.451094] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.464469] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 327a282e-b502-4644-a152-0e77ec399fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.475607] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4770186f-8bd4-455c-a21d-f79e2230fa4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.489931] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c60488a7-7d3b-49af-8b4d-9aad718a37a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.490354] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 687.490442] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 687.909138] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd95db93-5bc2-464a-aa79-cf80a72ad0bd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.919652] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fceba05-e512-4113-bf98-9d359f87fa27 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.952164] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aaca18d-2826-4478-b11d-94ecaa07ec2c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.960709] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7820b03e-2841-4a8e-94cc-87554b6375ae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.976118] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.985186] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 688.002816] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 688.003395] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.876s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.002591] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.002858] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 689.002901] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 689.036264] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.036504] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.036664] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.036816] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.037171] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.037171] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.037247] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.037380] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.037458] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.040750] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 689.040750] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 689.040750] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.040750] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.040750] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.500658] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0a4fd872-d827-468d-ae65-eb6f82c93d5b tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] Acquiring lock "02f37f91-5ee0-46bb-a5e2-ec8256c1f22c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.500903] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0a4fd872-d827-468d-ae65-eb6f82c93d5b tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] Lock "02f37f91-5ee0-46bb-a5e2-ec8256c1f22c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.522404] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cd51b49c-ea51-45e1-864e-6014e6a9e45c tempest-ServersAdminNegativeTestJSON-35601590 tempest-ServersAdminNegativeTestJSON-35601590-project-member] Acquiring lock "9ea6880c-469b-4c66-927e-442a41e22163" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.522795] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cd51b49c-ea51-45e1-864e-6014e6a9e45c tempest-ServersAdminNegativeTestJSON-35601590 tempest-ServersAdminNegativeTestJSON-35601590-project-member] Lock "9ea6880c-469b-4c66-927e-442a41e22163" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.334787] env[62476]: DEBUG oslo_concurrency.lockutils [None
req-c0b4a918-7c0c-4a85-8d45-3f47506313ad tempest-ServerDiagnosticsV248Test-1209440432 tempest-ServerDiagnosticsV248Test-1209440432-project-member] Acquiring lock "7c661fb6-abb7-486b-9188-f8d4dd6bb1a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.335077] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c0b4a918-7c0c-4a85-8d45-3f47506313ad tempest-ServerDiagnosticsV248Test-1209440432 tempest-ServerDiagnosticsV248Test-1209440432-project-member] Lock "7c661fb6-abb7-486b-9188-f8d4dd6bb1a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.015948] env[62476]: DEBUG oslo_concurrency.lockutils [None req-387d0014-0337-49ad-845e-0d4112a73d6f tempest-ServerDiagnosticsNegativeTest-163132470 tempest-ServerDiagnosticsNegativeTest-163132470-project-member] Acquiring lock "7620aead-4244-47ac-be0a-6614d03ec2c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.015948] env[62476]: DEBUG oslo_concurrency.lockutils [None req-387d0014-0337-49ad-845e-0d4112a73d6f tempest-ServerDiagnosticsNegativeTest-163132470 tempest-ServerDiagnosticsNegativeTest-163132470-project-member] Lock "7620aead-4244-47ac-be0a-6614d03ec2c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.417961] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5b1017b3-8272-45fc-8ecd-57c0aa6a49d1 tempest-InstanceActionsV221TestJSON-386317821 tempest-InstanceActionsV221TestJSON-386317821-project-member] Acquiring lock "8ce40d44-062c-47cf-be36-d8ed6d924094" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.417961] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5b1017b3-8272-45fc-8ecd-57c0aa6a49d1 tempest-InstanceActionsV221TestJSON-386317821 tempest-InstanceActionsV221TestJSON-386317821-project-member] Lock "8ce40d44-062c-47cf-be36-d8ed6d924094" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.245622] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9ef8f4ca-9438-414b-b2f8-1f94a82c819e tempest-ServersTestBootFromVolume-495427889 tempest-ServersTestBootFromVolume-495427889-project-member] Acquiring lock "a2008090-914b-448c-8c60-776d4032e091" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.245977] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9ef8f4ca-9438-414b-b2f8-1f94a82c819e tempest-ServersTestBootFromVolume-495427889 tempest-ServersTestBootFromVolume-495427889-project-member] Lock "a2008090-914b-448c-8c60-776d4032e091"
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.011874] env[62476]: WARNING oslo_vmware.rw_handles [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 720.011874] env[62476]: ERROR oslo_vmware.rw_handles [ 720.011874] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 720.012572] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 720.012572] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Copying Virtual Disk [datastore1] vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/8dedca10-3835-4a79-904b-57cb3ccb22bd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 720.012572] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11b6d164-a376-46ff-8833-973e6f13c296 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.020049] env[62476]: DEBUG oslo_vmware.api [None 
req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for the task: (returnval){ [ 720.020049] env[62476]: value = "task-4319045" [ 720.020049] env[62476]: _type = "Task" [ 720.020049] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.028729] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': task-4319045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.531194] env[62476]: DEBUG oslo_vmware.exceptions [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 720.532046] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.532125] env[62476]: ERROR nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 720.532125] env[62476]: Faults: ['InvalidArgument'] [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Traceback (most recent call last): [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] yield resources [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self.driver.spawn(context, instance, image_meta, [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self._fetch_image_if_missing(context, vi) [ 720.532125] env[62476]: ERROR nova.compute.manager [instance: 
ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] image_cache(vi, tmp_image_ds_loc) [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] vm_util.copy_virtual_disk( [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] session._wait_for_task(vmdk_copy_task) [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] return self.wait_for_task(task_ref) [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] return evt.wait() [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] result = hub.switch() [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 720.532420] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] return self.greenlet.switch() [ 720.532726] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 720.532726] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self.f(*self.args, **self.kw) [ 720.532726] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 720.532726] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] raise exceptions.translate_fault(task_info.error) [ 720.532726] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 720.532726] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Faults: ['InvalidArgument'] [ 720.532726] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] [ 720.532726] env[62476]: INFO nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 
ae5723f6-0107-46e8-971d-fca307ce67c8] Terminating instance [ 720.533995] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.534210] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.534451] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3021d0d-6b29-444f-96cb-0fc3c47896ea {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.536674] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 720.536870] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 720.537613] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c665f2a-e3d0-4b8a-99da-5ee9b73cb706 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.544928] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 720.545179] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e884b33b-a986-4c03-9809-d76680977361 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.547780] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.547957] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 720.548637] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eceb287-1df2-40ca-8b93-5a860a07f4c1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.554059] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Waiting for the task: (returnval){ [ 720.554059] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5246e035-1d25-5bd2-6685-f9cf0b3973e3" [ 720.554059] env[62476]: _type = "Task" [ 720.554059] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.570682] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 720.571088] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Creating directory with path [datastore1] vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.571438] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8f9b304-b0d6-4a65-b055-03079971e240 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.596015] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Created directory with path [datastore1] vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.596385] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Fetch image to [datastore1] vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 720.596677] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 720.597874] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84f3944-51d2-4f3c-89a4-1f40fbde97ef {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.607018] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd5ae26-9d21-4737-94e5-1e891471f7a9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.619316] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c82584-18e9-436e-b9d7-01d8fdfb8856 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.652276] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf304ea9-3e7c-44a0-ba66-267f5c1272de {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.658961] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8541c8b1-3523-4d2e-bb8b-e9f3f4fa169e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.683888] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 720.738265] env[62476]: DEBUG oslo_vmware.rw_handles [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 720.799236] env[62476]: DEBUG oslo_vmware.rw_handles [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 720.799450] env[62476]: DEBUG oslo_vmware.rw_handles [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 721.394063] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 721.394063] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 721.394063] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Deleting the datastore file [datastore1] ae5723f6-0107-46e8-971d-fca307ce67c8 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 721.394063] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9276ec94-42b7-4119-bc03-a078fc8ee5d4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.400636] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for the task: (returnval){ [ 721.400636] env[62476]: value = "task-4319047" [ 721.400636] env[62476]: _type = "Task" [ 721.400636] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.409908] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': task-4319047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.910484] env[62476]: DEBUG oslo_vmware.api [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': task-4319047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115459} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.910870] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.911062] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 721.911256] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 721.911407] env[62476]: INFO nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Took 1.37 seconds to destroy the instance on the hypervisor. [ 721.913552] env[62476]: DEBUG nova.compute.claims [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 721.913732] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.913943] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.410463] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d29882-05ed-4d37-a474-d17a6d39b202 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.418459] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfd300a-3e3e-4848-88df-feb22e92de52 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.449977] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8435db-d199-4c99-855c-6bcfd4ae6254 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.458296] env[62476]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7c6188-e610-460b-b63c-d86bd2713977 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.472572] env[62476]: DEBUG nova.compute.provider_tree [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.482965] env[62476]: DEBUG nova.scheduler.client.report [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.498460] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.584s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.499064] env[62476]: ERROR nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 722.499064] env[62476]: Faults: ['InvalidArgument'] [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Traceback (most recent call last): [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self.driver.spawn(context, instance, image_meta, [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self._fetch_image_if_missing(context, vi) [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] image_cache(vi, tmp_image_ds_loc) [ 722.499064] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] vm_util.copy_virtual_disk( [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] session._wait_for_task(vmdk_copy_task) [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] return self.wait_for_task(task_ref) [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] return evt.wait() [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] result = hub.switch() [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] return self.greenlet.switch() [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 722.499358] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] self.f(*self.args, **self.kw) [ 722.499613] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 722.499613] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] raise exceptions.translate_fault(task_info.error) [ 722.499613] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 722.499613] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Faults: ['InvalidArgument'] [ 722.499613] env[62476]: ERROR nova.compute.manager [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] [ 722.499859] env[62476]: DEBUG nova.compute.utils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] VimFaultException {{(pid=62476) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 722.502441] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Build of instance ae5723f6-0107-46e8-971d-fca307ce67c8 was re-scheduled: A specified parameter was not correct: fileType [ 722.502441] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 722.502864] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 722.503056] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 722.503217] env[62476]: DEBUG nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 722.503383] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 723.123735] env[62476]: DEBUG nova.network.neutron [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.135412] env[62476]: INFO nova.compute.manager [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: ae5723f6-0107-46e8-971d-fca307ce67c8] Took 0.63 seconds to deallocate network for instance. 
[ 723.273170] env[62476]: INFO nova.scheduler.client.report [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Deleted allocations for instance ae5723f6-0107-46e8-971d-fca307ce67c8 [ 723.310671] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93c8df1b-7255-442b-8c34-be6de88d8c1d tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "ae5723f6-0107-46e8-971d-fca307ce67c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.525s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.329285] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 723.378646] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.378905] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.380440] env[62476]: INFO nova.compute.claims [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.832929] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c547a74-7735-417d-a394-b240c31b216f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.841105] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5f5791-78bc-4600-9ce0-b25c111295f9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.872316] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88da9bed-ba04-440b-b154-c2c05d39f161 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.880393] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce43b74-6ef0-42f6-b652-d52bc4bc14e7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.894527] env[62476]: DEBUG nova.compute.provider_tree [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 
tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.903958] env[62476]: DEBUG nova.scheduler.client.report [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 723.923117] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.544s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.923292] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 723.961339] env[62476]: DEBUG nova.compute.utils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 723.962906] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 723.962906] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 723.981228] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 724.056292] env[62476]: DEBUG nova.policy [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9766451a6ad6400e9de77ab77cfbccce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fffb0483f5b24640b59fdf6b6bf5b4b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 724.067896] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 724.105387] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.106119] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.106540] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.107122] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.107622] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.108022] env[62476]: DEBUG nova.virt.hardware [None 
req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.109011] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.109266] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.109707] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.110181] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.110861] env[62476]: DEBUG nova.virt.hardware [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.112681] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93504140-d981-4487-9b75-6be2a1d34637 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.125182] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d454c796-04d7-477c-bfcc-6268a22e35c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.569159] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Successfully created port: 588a23c5-0770-4078-b269-fa7fd3841fd2 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.087952] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "1e005b4d-7f94-4263-ba5d-303af209c408" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.088281] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "1e005b4d-7f94-4263-ba5d-303af209c408" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.526311] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Successfully updated port: 588a23c5-0770-4078-b269-fa7fd3841fd2 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.535410] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "refresh_cache-ebd0c337-82cd-4d0a-9089-b9e2c72c417d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.535561] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired lock "refresh_cache-ebd0c337-82cd-4d0a-9089-b9e2c72c417d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.535743] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 725.617678] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 725.734558] env[62476]: DEBUG nova.compute.manager [req-eb868be3-eff1-4c57-9e75-0417dca29e9c req-82a5a56d-0662-405a-ab2e-d4a8aabfeabc service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Received event network-vif-plugged-588a23c5-0770-4078-b269-fa7fd3841fd2 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 725.734767] env[62476]: DEBUG oslo_concurrency.lockutils [req-eb868be3-eff1-4c57-9e75-0417dca29e9c req-82a5a56d-0662-405a-ab2e-d4a8aabfeabc service nova] Acquiring lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.734977] env[62476]: DEBUG oslo_concurrency.lockutils [req-eb868be3-eff1-4c57-9e75-0417dca29e9c req-82a5a56d-0662-405a-ab2e-d4a8aabfeabc service nova] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.735169] env[62476]: DEBUG oslo_concurrency.lockutils [req-eb868be3-eff1-4c57-9e75-0417dca29e9c req-82a5a56d-0662-405a-ab2e-d4a8aabfeabc service nova] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.735337] env[62476]: DEBUG nova.compute.manager [req-eb868be3-eff1-4c57-9e75-0417dca29e9c req-82a5a56d-0662-405a-ab2e-d4a8aabfeabc service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] No waiting events found dispatching network-vif-plugged-588a23c5-0770-4078-b269-fa7fd3841fd2 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 725.735502] env[62476]: WARNING nova.compute.manager [req-eb868be3-eff1-4c57-9e75-0417dca29e9c req-82a5a56d-0662-405a-ab2e-d4a8aabfeabc service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Received unexpected event network-vif-plugged-588a23c5-0770-4078-b269-fa7fd3841fd2 for instance with vm_state building and task_state spawning. 
[ 725.873114] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Updating instance_info_cache with network_info: [{"id": "588a23c5-0770-4078-b269-fa7fd3841fd2", "address": "fa:16:3e:81:1b:78", "network": {"id": "e4c40932-e3ae-4a9e-8be0-03603b3183a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1410291433-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fffb0483f5b24640b59fdf6b6bf5b4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap588a23c5-07", "ovs_interfaceid": "588a23c5-0770-4078-b269-fa7fd3841fd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.890505] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Releasing lock "refresh_cache-ebd0c337-82cd-4d0a-9089-b9e2c72c417d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.890900] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Instance network_info: |[{"id": "588a23c5-0770-4078-b269-fa7fd3841fd2", "address": "fa:16:3e:81:1b:78", "network": {"id": "e4c40932-e3ae-4a9e-8be0-03603b3183a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1410291433-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fffb0483f5b24640b59fdf6b6bf5b4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap588a23c5-07", "ovs_interfaceid": "588a23c5-0770-4078-b269-fa7fd3841fd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 725.891372] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None 
req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:1b:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '588a23c5-0770-4078-b269-fa7fd3841fd2', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.899079] env[62476]: DEBUG oslo.service.loopingcall [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.899622] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 725.899863] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a4efdc1-ec75-4b7d-9e1a-026f3a2d5bd5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.920433] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.920433] env[62476]: value = "task-4319048" [ 725.920433] env[62476]: _type = "Task" [ 725.920433] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.929552] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319048, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.430468] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319048, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.931434] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319048, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.432495] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319048, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.904573] env[62476]: DEBUG nova.compute.manager [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Received event network-changed-588a23c5-0770-4078-b269-fa7fd3841fd2 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 727.904805] env[62476]: DEBUG nova.compute.manager [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Refreshing instance network info cache due to event network-changed-588a23c5-0770-4078-b269-fa7fd3841fd2. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 727.905041] env[62476]: DEBUG oslo_concurrency.lockutils [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] Acquiring lock "refresh_cache-ebd0c337-82cd-4d0a-9089-b9e2c72c417d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.905193] env[62476]: DEBUG oslo_concurrency.lockutils [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] Acquired lock "refresh_cache-ebd0c337-82cd-4d0a-9089-b9e2c72c417d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.905357] env[62476]: DEBUG nova.network.neutron [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Refreshing network info cache for port 588a23c5-0770-4078-b269-fa7fd3841fd2 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 727.933262] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319048, 'name': CreateVM_Task, 'duration_secs': 1.596789} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.933419] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 727.937022] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.937022] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.937022] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 727.937022] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9eac322-83bd-4d71-a444-4071007b8ca2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.940421] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){ [ 727.940421] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]529004ef-2811-6e71-53bf-af6266581d9e" [ 727.940421] env[62476]: _type = "Task" [ 727.940421] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.949131] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]529004ef-2811-6e71-53bf-af6266581d9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.272704] env[62476]: DEBUG nova.network.neutron [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Updated VIF entry in instance network info cache for port 588a23c5-0770-4078-b269-fa7fd3841fd2. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 728.273111] env[62476]: DEBUG nova.network.neutron [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Updating instance_info_cache with network_info: [{"id": "588a23c5-0770-4078-b269-fa7fd3841fd2", "address": "fa:16:3e:81:1b:78", "network": {"id": "e4c40932-e3ae-4a9e-8be0-03603b3183a1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1410291433-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fffb0483f5b24640b59fdf6b6bf5b4b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap588a23c5-07", "ovs_interfaceid": "588a23c5-0770-4078-b269-fa7fd3841fd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.282750] env[62476]: DEBUG oslo_concurrency.lockutils [req-7d61fd03-f2dd-4a7d-9e67-5e144eb736f3 req-8042702d-fe76-443b-a6a6-1575006fec2f service nova] Releasing lock "refresh_cache-ebd0c337-82cd-4d0a-9089-b9e2c72c417d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.453052] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.453329] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.453522] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.027035] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.027366] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.027543] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.027688] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 747.027488] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.042838] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.043137] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.043376] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.043548] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 747.044717] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a9887c-6950-42db-8ef9-347ce8763cb9 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.054322] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf788aa-27d9-4710-b919-6bd8fb2fec25 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.069014] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ee6b87-6160-4304-ac61-3ad5d1b04317 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.076637] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a680fe8d-668f-4c3c-bea5-326fca0a36ce {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.106123] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180697MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 747.106280] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.106524] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.187484] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance adf2f380-84ad-480b-aa9a-16b19c05a3f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.187619] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 187242f5-934b-4c1d-b8ac-2ce8c347351a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188298] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188298] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188298] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 760f3c9b-044d-4593-bc97-535ac09c3f3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188298] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1323e67f-17c6-4432-8eea-98c285745766 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188464] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188464] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e41d1a8c-ad7e-4151-9745-04318b007dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188729] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.188729] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.201821] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.214295] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.226141] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.236458] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d271fa1d-d7f3-4abf-9b5f-69396c4c128c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.249028] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.256813] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance da5c9742-9dba-4691-9bda-25858915857b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.267892] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.278507] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ab7da5f4-9460-4d70-a0e5-5a690284d0e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.288891] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 380bc9c3-8bba-4f26-b938-e4e74543261c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.299129] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 971698f2-c127-4f21-ae3f-3bb863742982 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.309516] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.321401] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 327a282e-b502-4644-a152-0e77ec399fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.333992] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4770186f-8bd4-455c-a21d-f79e2230fa4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.345123] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c60488a7-7d3b-49af-8b4d-9aad718a37a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.360447] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 02f37f91-5ee0-46bb-a5e2-ec8256c1f22c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.372709] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9ea6880c-469b-4c66-927e-442a41e22163 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.383482] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7c661fb6-abb7-486b-9188-f8d4dd6bb1a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.394864] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7620aead-4244-47ac-be0a-6614d03ec2c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.405179] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8ce40d44-062c-47cf-be36-d8ed6d924094 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.417297] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a2008090-914b-448c-8c60-776d4032e091 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.429341] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
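The run of near-identical records above is the resource tracker's periodic allocation audit: placement still holds an allocation for each of these instances, but the host is not running them yet, so the tracker deliberately skips "healing" (removing) the allocation. A minimal standalone sketch of that decision, with plain dicts and sets standing in for the real Nova objects and placement client:

def audit_allocations(allocations, tracked, scheduled):
    """Return allocations that should be removed from placement.

    allocations: {instance_uuid: {'resources': {...}}} as reported by placement
    tracked:     uuids of instances this host is actually running
    scheduled:   uuids placed on this host whose builds have yet to start
    """
    to_remove = {}
    for uuid, alloc in allocations.items():
        if uuid in tracked:
            continue             # consistent: the host knows this instance
        if uuid in scheduled:
            # The scheduler made the allocation but the instance has yet to
            # start -- skip the heal, as the DEBUG records above report.
            print(f"Skipping heal of allocation for {uuid}: {alloc}")
            continue
        to_remove[uuid] = alloc  # orphaned allocation: remove it
    return to_remove

allocs = {'a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f':
          {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}}
print(audit_allocations(allocs, tracked=set(),
                        scheduled={'a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f'}))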
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.429592] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 747.429740] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 747.821978] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374d5700-341c-42f2-955b-8cbf25bdc2ad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.830170] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976f1560-f44b-4a77-af34-99bfbe896b4a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.861434] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26773e7f-4bcb-45ad-854d-9ef72939bd38 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.872058] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e20094-eec5-4d30-b964-1bd10fc46774 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.883703] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.898636] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.914206] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 747.914425] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.808s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.914297] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.914618] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.027528] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 749.027528] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.027871] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.028181] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 750.028181] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 750.048050] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048050] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048211] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048338] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048464] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Skipping network cache update for instance because it is Building. 
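The burst of "Running periodic task ComputeManager._poll_*" records comes from oslo.service's periodic-task machinery, which the compute manager inherits and which the service loop drives on a timer. A toy sketch of that wiring, assuming oslo.service and oslo.config are installed; the manager and task bodies here are placeholders, not Nova code:

from oslo_config import cfg
from oslo_service import periodic_task

class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _poll_volume_usage(self, context):
        print("Running periodic task DemoManager._poll_volume_usage")

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _heal_instance_info_cache(self, context):
        print("Running periodic task DemoManager._heal_instance_info_cache")

# The service loop invokes this repeatedly; run_immediately=True makes the
# tasks fire on the first pass so the demo prints something right away.
DemoManager().run_periodic_tasks(context=None)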
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048590] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048714] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048836] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.048955] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.049088] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 750.049210] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
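The cache-heal pass above rebuilds its candidate list, skips every instance still in the Building state, and ends with nothing to refresh. A hedged sketch of that selection logic (simplified; the real code walks instance objects and refreshes the network info cache of the instance it picks):

def pick_instance_to_heal(instances):
    """instances: iterable of (uuid, vm_state); returns a uuid or None."""
    for uuid, vm_state in instances:
        if vm_state == 'building':
            print(f"[instance: {uuid}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        return uuid   # this one would get its info cache refreshed
    print("Didn't find any instances for network info cache update.")
    return None

pick_instance_to_heal([('adf2f380-84ad-480b-aa9a-16b19c05a3f3', 'building')])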
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 769.268649] env[62476]: WARNING oslo_vmware.rw_handles [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 769.268649] env[62476]: ERROR oslo_vmware.rw_handles [ 769.269307] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 769.270746] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 769.270995] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Copying Virtual Disk [datastore1] vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/080f63b1-88ec-4e1a-ad65-00d506a54efd/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 769.271335] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d52a0906-9e12-40d1-8efb-c282142a0c0a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.280523] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Waiting for the task: (returnval){ [ 769.280523] env[62476]: 
value = "task-4319049" [ 769.280523] env[62476]: _type = "Task" [ 769.280523] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.288686] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Task: {'id': task-4319049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.791878] env[62476]: DEBUG oslo_vmware.exceptions [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 769.792223] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.792784] env[62476]: ERROR nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 769.792784] env[62476]: Faults: ['InvalidArgument'] [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Traceback (most recent call last): [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] yield resources [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] self.driver.spawn(context, instance, image_meta, [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] self._fetch_image_if_missing(context, vi) [ 769.792784] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] image_cache(vi, tmp_image_ds_loc) [ 769.793867] env[62476]: 
ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] vm_util.copy_virtual_disk( [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] session._wait_for_task(vmdk_copy_task) [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] return self.wait_for_task(task_ref) [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] return evt.wait() [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] result = hub.switch() [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 769.793867] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] return self.greenlet.switch() [ 769.794243] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 769.794243] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] self.f(*self.args, **self.kw) [ 769.794243] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 769.794243] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] raise exceptions.translate_fault(task_info.error) [ 769.794243] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 769.794243] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Faults: ['InvalidArgument'] [ 769.794243] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] [ 769.794243] env[62476]: INFO nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Terminating instance [ 769.794766] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.794974] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 769.796035] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 769.796035] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 769.796218] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76780148-6f7d-4d18-b308-e5081dde2356 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.798533] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22523e2-1260-4754-acbb-3ee14726160e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.807410] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 769.807410] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13ca6355-ebae-46b2-9ef4-7f0e75edfc6c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.809862] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 769.810107] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Folder [datastore1] devstack-image-cache_base created. 
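The two request contexts above coordinate through named locks: one releases the image-cache lock while the other acquires it, and "compute_resources" appears the same way throughout this log. The pattern is oslo.concurrency's lockutils, sketched below (assumes oslo.concurrency is installed; the lock names are taken from the log, the function body is a placeholder):

from oslo_concurrency import lockutils

# Context-manager form, as in the Acquiring/Acquired/Releasing records:
with lockutils.lock('compute_resources'):
    print("holding compute_resources")

# Decorator form; every call serialises on the same in-process name:
@lockutils.synchronized('[datastore1] devstack-image-cache_base')
def refresh_image_cache():
    print("cache work runs under the lock")

refresh_image_cache()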
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 769.811376] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71763ca7-b16e-4628-ba1d-87cf725c171f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.816980] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Waiting for the task: (returnval){ [ 769.816980] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52256796-ad3a-410f-a169-84431b2b71ce" [ 769.816980] env[62476]: _type = "Task" [ 769.816980] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.829680] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52256796-ad3a-410f-a169-84431b2b71ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.879057] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 769.879057] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 769.879057] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Deleting the datastore file [datastore1] adf2f380-84ad-480b-aa9a-16b19c05a3f3 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 769.879057] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74d7ed54-dcc3-42b2-933e-3ee733160b8f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.885848] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Waiting for the task: (returnval){ [ 769.885848] env[62476]: value = "task-4319051" [ 769.885848] env[62476]: _type = "Task" [ 769.885848] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.894956] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Task: {'id': task-4319051, 'name': DeleteDatastoreFile_Task} progress is 0%. 
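"Waiting for the task ... to complete" followed by "progress is 0%" is the client-side polling loop: oslo.vmware repeatedly reads the task's state, logs progress, and translates a terminal error into a typed fault. A simplified stand-in for that loop (poll() and its return shape are assumptions, not the real TaskInfo API):

import time

def wait_for_task(poll, interval=0.5):
    """poll() -> (state, progress, error); returns once state is 'success'."""
    while True:
        state, progress, error = poll()
        print(f"progress is {progress}%")
        if state == 'success':
            return
        if state == 'error':
            # oslo.vmware raises a translated fault here, e.g. the
            # InvalidArgument fault seen earlier in this log.
            raise RuntimeError(error)
        time.sleep(interval)

states = iter([('running', 0, None), ('running', 50, None),
               ('success', 100, None)])
wait_for_task(lambda: next(states), interval=0)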
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.328224] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 770.328495] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Creating directory with path [datastore1] vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.328739] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db8d4566-8784-40ee-b44e-33aae96edfb1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.342349] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Created directory with path [datastore1] vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.342554] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Fetch image to [datastore1] vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 770.342726] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 770.343730] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa22a55-2ca3-42bf-ad3e-db3fcbbd47e7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.350728] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684030ad-7869-4715-9cd6-0944c9993be1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.361561] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9e4632-ef9a-4891-9fb3-a1c09237e48b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.395856] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fa8bc8-da83-437a-b92e-a82c7ff0a4a9 
{{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.403706] env[62476]: DEBUG oslo_vmware.api [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Task: {'id': task-4319051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087956} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.405188] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.405391] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 770.405562] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 770.405733] env[62476]: INFO nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Took 0.61 seconds to destroy the instance on the hypervisor. 
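The destroy sequence above is strictly ordered: unregister the VM from vCenter inventory first, then delete its directory from the datastore, timing the delete task. A stand-in sketch of that ordering (both callables are placeholders for the VirtualMachine.UnregisterVM and FileManager.DeleteDatastoreFile_Task invocations):

import time

def destroy_instance(unregister_vm, delete_datastore_file, instance_uuid):
    unregister_vm()                      # drop the VM from vCenter inventory
    print("Unregistered the VM")
    started = time.monotonic()
    delete_datastore_file(f"[datastore1] {instance_uuid}")
    print("Deleted the datastore file",
          {'duration_secs': round(time.monotonic() - started, 6)})

destroy_instance(lambda: None, lambda path: None,
                 'adf2f380-84ad-480b-aa9a-16b19c05a3f3')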
[ 770.407808] env[62476]: DEBUG nova.compute.claims [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 770.407981] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.408212] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.411348] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3d698e82-9ae5-430b-a3d6-dc3b63f692ab {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.438302] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 770.497924] env[62476]: DEBUG oslo_vmware.rw_handles [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 770.560313] env[62476]: DEBUG oslo_vmware.rw_handles [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 770.560534] env[62476]: DEBUG oslo_vmware.rw_handles [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
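The write handle created and closed above streams image bytes over HTTPS to the datastore's file endpoint with a fixed Content-Length, and only reads the server's response when the handle is closed. That explains the earlier WARNING: if the host drops the connection before answering, the close path's getresponse() raises http.client.RemoteDisconnected. A rough stdlib-only sketch of the same shape (host, path, and the omitted session-ticket handling are simplifications):

import http.client

def upload_file(host, path, data_iter, size):
    conn = http.client.HTTPSConnection(host, 443)
    conn.putrequest('PUT', path)
    conn.putheader('Content-Length', str(size))
    conn.endheaders()
    for chunk in data_iter:          # "reading data from the image iterator"
        conn.send(chunk)
    # Closing the handle reads the response; a server that hangs up here
    # produces the RemoteDisconnected warning seen earlier in this log.
    response = conn.getresponse()
    conn.close()
    return response.status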
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 770.926711] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9855158d-b5d1-4cef-81b4-bafac9b1ca3b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.934707] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130c0f53-ce69-4167-bd79-b1a705703556 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.965781] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d220dbd7-4f68-46d8-b1fc-75c1d1c21c92 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.974078] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a529cc7-1473-484d-be82-15a5413bac7d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.987830] env[62476]: DEBUG nova.compute.provider_tree [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.996748] env[62476]: DEBUG nova.scheduler.client.report [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 771.016821] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.607s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.016821] env[62476]: ERROR nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 771.016821] env[62476]: Faults: ['InvalidArgument'] [ 771.016821] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Traceback (most recent call last): [ 771.016821] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 771.016821] env[62476]: ERROR nova.compute.manager [instance: 
adf2f380-84ad-480b-aa9a-16b19c05a3f3] self.driver.spawn(context, instance, image_meta, [ 771.016821] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 771.016821] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 771.016821] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 771.016821] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] self._fetch_image_if_missing(context, vi) [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] image_cache(vi, tmp_image_ds_loc) [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] vm_util.copy_virtual_disk( [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] session._wait_for_task(vmdk_copy_task) [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] return self.wait_for_task(task_ref) [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] return evt.wait() [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] result = hub.switch() [ 771.017277] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] return self.greenlet.switch() [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] self.f(*self.args, **self.kw) [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] raise exceptions.translate_fault(task_info.error) [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Faults: ['InvalidArgument'] [ 771.017618] env[62476]: ERROR nova.compute.manager [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] [ 771.017618] env[62476]: DEBUG nova.compute.utils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 771.018857] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Build of instance adf2f380-84ad-480b-aa9a-16b19c05a3f3 was re-scheduled: A specified parameter was not correct: fileType [ 771.018857] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 771.019283] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 771.019457] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
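A failed spawn does not fail the instance outright: the fault is recorded, the claim is rolled back, and the build is re-scheduled so it can be retried elsewhere. A stand-in sketch of that control flow (VimFault below is a simplified analogue of oslo_vmware.exceptions.VimFaultException, which carries a message plus a fault list):

class VimFault(Exception):
    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list

def build_and_run(spawn):
    try:
        spawn()
    except VimFault as exc:
        print(f"Build was re-scheduled: {exc} Faults: {exc.fault_list}")
        return 'RESCHEDULED'   # a retry gets a fresh scheduling pass
    return 'ACTIVE'

def failing_spawn():
    raise VimFault('A specified parameter was not correct: fileType',
                   ['InvalidArgument'])

print(build_and_run(failing_spawn))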
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 771.019607] env[62476]: DEBUG nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 771.020071] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 771.430811] env[62476]: DEBUG nova.network.neutron [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.444287] env[62476]: INFO nova.compute.manager [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] [instance: adf2f380-84ad-480b-aa9a-16b19c05a3f3] Took 0.42 seconds to deallocate network for instance. [ 771.551929] env[62476]: INFO nova.scheduler.client.report [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Deleted allocations for instance adf2f380-84ad-480b-aa9a-16b19c05a3f3 [ 771.575811] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2ba5ed02-45e6-40c9-8a95-51f8d128d322 tempest-TenantUsagesTestJSON-239026597 tempest-TenantUsagesTestJSON-239026597-project-member] Lock "adf2f380-84ad-480b-aa9a-16b19c05a3f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 153.360s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.587063] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Starting instance... 
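Before the claim that follows, it is worth unpacking the inventory payload repeated throughout this log: placement computes schedulable capacity per resource class as (total - reserved) * allocation_ratio, while max_unit caps what any single instance may consume. Plugging in the logged numbers:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0 -- which is why 10 allocated
# vCPUs against 48 physical cores leaves ample headroom for new claims.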
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 771.639793] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.639996] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.641567] env[62476]: INFO nova.compute.claims [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.119284] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d547000f-a76e-4e2b-9efa-f1868abcc571 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.127279] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d05f72c-816b-4464-9afd-f6ad84b0cd40 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.158805] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211506b9-6365-449e-b476-48fddd0159ea {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.167682] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfd5943-8296-47f9-82b7-e76b474559ca {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.181682] env[62476]: DEBUG nova.compute.provider_tree [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.193490] env[62476]: DEBUG nova.scheduler.client.report [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.207554] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae 
tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.567s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.208070] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 772.251687] env[62476]: DEBUG nova.compute.utils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.253016] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Not allocating networking since 'none' was specified. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 772.264723] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 772.334258] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Start spawning the instance on the hypervisor. 
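The instance_claim/abort_instance_claim pairing seen in this log is a reserve-then-roll-back pattern: resources are claimed under the compute_resources lock before the build starts, and the claim is dropped if the build fails (as happened for the instance above). A toy context-manager version of that pattern, not the real nova.compute.claims class:

class Claim:
    def __init__(self, tracker, vcpus):
        self.tracker, self.vcpus = tracker, vcpus
        tracker['used_vcpus'] += vcpus               # "Claim successful on node"

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        if exc_type is not None:
            self.tracker['used_vcpus'] -= self.vcpus  # "Aborting claim"
        return False                                  # let the failure propagate

tracker = {'used_vcpus': 0}
try:
    with Claim(tracker, 1):
        raise RuntimeError('spawn failed')            # stands in for the VimFault
except RuntimeError:
    pass
assert tracker['used_vcpus'] == 0                     # the claim was rolled back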
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 772.360417] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 772.360661] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 772.360818] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 772.360998] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 772.361240] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 772.361403] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 772.361613] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 772.361771] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 772.361936] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae 
tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 772.362201] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 772.362408] env[62476]: DEBUG nova.virt.hardware [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 772.363290] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5387b72a-8d98-41c5-ac32-0e010e6516e1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.371729] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217d2313-a3a8-4927-9ab9-36b68bb25b9d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.385337] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance VIF info [] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.390904] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Creating folder: Project (c3fad05477dc4b119d8fa5fb5a9b1c40). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 772.391221] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adc4751f-6e7d-4229-898d-a9c495856cbf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.401599] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Created folder: Project (c3fad05477dc4b119d8fa5fb5a9b1c40) in parent group-v849485. [ 772.401797] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Creating folder: Instances. Parent ref: group-v849524. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 772.402057] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-384a0884-045e-42bf-a0e7-5ce52c7ad66e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.411035] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Created folder: Instances in parent group-v849524. 
[ 772.411255] env[62476]: DEBUG oslo.service.loopingcall [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.411455] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 772.411662] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ead6bc9-38c1-4aa6-9ff5-af3e5b0fef06 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.428670] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.428670] env[62476]: value = "task-4319054" [ 772.428670] env[62476]: _type = "Task" [ 772.428670] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.437035] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319054, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.939983] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319054, 'name': CreateVM_Task, 'duration_secs': 0.278585} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.940177] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 772.940585] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.940734] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.941063] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 772.941351] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e19bcae-5bce-4521-ba4f-d9b0809d0ed9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.946480] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Waiting for the task: (returnval){ [ 
772.946480] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]527ad659-c209-2a3a-0627-c441302ababf" [ 772.946480] env[62476]: _type = "Task" [ 772.946480] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.955127] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]527ad659-c209-2a3a-0627-c441302ababf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.457894] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.458266] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.458366] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.035747] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "f4e97733-101b-46dd-aec4-a3287b120eb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.035747] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.306937] env[62476]: DEBUG oslo_concurrency.lockutils [None req-49906688-f7e6-4ae4-85ce-7e4c74c37210 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "4a9416ca-21ad-42eb-9ffd-a0009d6d96a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.307231] env[62476]: DEBUG oslo_concurrency.lockutils [None req-49906688-f7e6-4ae4-85ce-7e4c74c37210 tempest-ListImageFiltersTestJSON-1669522138 
tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "4a9416ca-21ad-42eb-9ffd-a0009d6d96a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.027599] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.027940] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.027993] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.027993] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 808.027090] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.027291] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.027434] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.042042] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.042346] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.042346] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.042500] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute 
resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 808.043660] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3e60d8-5529-4878-8e77-cd522e794817 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.052742] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5972c63-3256-4c42-b2c3-49e5423d9ca7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.067402] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4625908-4dec-4960-8212-fd6466639db7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.074939] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f6237a-1884-491a-81f9-c48b2ec76cec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.106185] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180676MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 808.106382] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.106549] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.186687] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 187242f5-934b-4c1d-b8ac-2ce8c347351a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.186929] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.187127] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.187311] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 760f3c9b-044d-4593-bc97-535ac09c3f3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.187487] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1323e67f-17c6-4432-8eea-98c285745766 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.187661] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.187845] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e41d1a8c-ad7e-4151-9745-04318b007dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.188023] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.188200] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.188366] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.199825] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.211250] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.222082] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d271fa1d-d7f3-4abf-9b5f-69396c4c128c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.234555] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.246506] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance da5c9742-9dba-4691-9bda-25858915857b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.256818] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.268026] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ab7da5f4-9460-4d70-a0e5-5a690284d0e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.279255] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 380bc9c3-8bba-4f26-b938-e4e74543261c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.290040] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 971698f2-c127-4f21-ae3f-3bb863742982 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.300029] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.311923] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 327a282e-b502-4644-a152-0e77ec399fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.323039] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4770186f-8bd4-455c-a21d-f79e2230fa4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.334638] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c60488a7-7d3b-49af-8b4d-9aad718a37a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.345391] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 02f37f91-5ee0-46bb-a5e2-ec8256c1f22c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.357522] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9ea6880c-469b-4c66-927e-442a41e22163 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.369162] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7c661fb6-abb7-486b-9188-f8d4dd6bb1a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.381978] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7620aead-4244-47ac-be0a-6614d03ec2c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.395576] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8ce40d44-062c-47cf-be36-d8ed6d924094 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.407051] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a2008090-914b-448c-8c60-776d4032e091 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.417765] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.431559] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.442286] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4a9416ca-21ad-42eb-9ffd-a0009d6d96a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.442626] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 808.442806] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 808.846432] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cfc167-a379-43db-adee-6e98e426d263 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.854701] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a26fc2a-0e25-43a7-a2fb-245bdd047c88 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.885622] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971b1e91-c8f8-40c0-bbfe-edbc19748306 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.895167] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c3464f-5cc3-477a-a06f-07ff428c6cf5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.908256] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.917265] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.935648] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 808.935862] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.829s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.936256] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.023308] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.046524] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.046524] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 810.046524] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 810.067347] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067347] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067347] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067347] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067347] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067701] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067701] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067701] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067701] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067804] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 810.067910] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 810.068448] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.039449] env[62476]: WARNING oslo_vmware.rw_handles [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 820.039449] env[62476]: ERROR oslo_vmware.rw_handles [ 820.040113] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 820.041619] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 820.041957] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Copying Virtual Disk [datastore1] vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/0ccd2385-8249-4681-a609-41a2dd03c69d/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 820.042182] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5405547e-0141-4173-a57c-3213158efe4e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.051768] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Waiting for the task: (returnval){ [ 820.051768] env[62476]: value = "task-4319055" [ 820.051768] env[62476]: _type = "Task" [ 820.051768] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.059884] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Task: {'id': task-4319055, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.565028] env[62476]: DEBUG oslo_vmware.exceptions [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 820.565028] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.565028] env[62476]: ERROR nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 820.565028] env[62476]: Faults: ['InvalidArgument'] [ 820.565028] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Traceback (most recent call last): [ 820.565028] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 820.565028] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] yield resources [ 820.565028] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 820.565028] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] self.driver.spawn(context, instance, image_meta, [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] self._fetch_image_if_missing(context, vi) [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] image_cache(vi, tmp_image_ds_loc) [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] vm_util.copy_virtual_disk( [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] session._wait_for_task(vmdk_copy_task) [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] return self.wait_for_task(task_ref) [ 820.565353] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] return evt.wait() [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] result = hub.switch() [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] return self.greenlet.switch() [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] self.f(*self.args, **self.kw) [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] raise exceptions.translate_fault(task_info.error) [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Faults: ['InvalidArgument'] [ 820.565714] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] [ 820.566099] env[62476]: INFO nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Terminating instance [ 820.566099] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.566099] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.566099] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f519e815-6603-4a47-9219-20e811bef65c {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.569425] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 820.569615] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 820.571047] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c9083c-2c51-4341-9699-80acc806a5c4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.578177] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 820.579252] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3962fd3b-23a9-4407-aba7-d7c3aa98c4e8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.580797] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.580892] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 820.581612] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-195cd59e-2285-43a5-959c-7a018781b94a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.587024] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Waiting for the task: (returnval){ [ 820.587024] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52bce40f-ef40-d3fb-f0ec-5e6d239f98d2" [ 820.587024] env[62476]: _type = "Task" [ 820.587024] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.594788] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52bce40f-ef40-d3fb-f0ec-5e6d239f98d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 821.097663] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 821.097952] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Creating directory with path [datastore1] vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 821.098217] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25dff567-41af-43c5-8c95-945cbaa02d1f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 821.119999] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Created directory with path [datastore1] vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 821.120274] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Fetch image to [datastore1] vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 821.120448] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 821.121299] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d2e11f-1d0b-4b0c-96ef-d6f524f90948 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 821.128629] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd77f3db-80b8-401f-844b-3d25440d5791 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 821.137793] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b501fddf-90f4-4fa4-9ccf-5593b9d5911b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 821.169584] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1e238c-c63c-4011-b9a0-06ad225c9d7d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 821.176527] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d84829fc-d357-41e5-90d6-3d50f72a14a6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 821.207933] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 821.264641] env[62476]: DEBUG oslo_vmware.rw_handles [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 821.325581] env[62476]: DEBUG oslo_vmware.rw_handles [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 821.325856] env[62476]: DEBUG oslo_vmware.rw_handles [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
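[Editor's note] The rw_handles entries above capture the whole image-transfer mechanism: Nova acquires a generic service ticket from the vCenter SessionManager, then streams the Glance image iterator over HTTPS straight to the datastore's /folder/ URL (21318656 bytes in this run). A minimal sketch of that upload pattern, assuming a ticket string and a chunk iterator are already in hand; the helper and the cookie name are illustrative assumptions, not the oslo.vmware API:

# Editor's sketch of the upload pattern in the rw_handles entries above:
# stream image chunks to the datastore's /folder/ URL. oslo.vmware wraps
# this in a write handle; this is an illustrative analogue, and the
# vmware_cgi_ticket cookie name is an assumption, not a documented API.
import requests


def upload_to_datastore(host, ds_path, image_chunks, ticket):
    # dcPath/dsName select the datacenter and datastore, as in the URL above.
    url = ('https://%s:443/folder/%s'
           '?dcPath=ha-datacenter&dsName=datastore1' % (host, ds_path))
    headers = {'Cookie': 'vmware_cgi_ticket=%s' % ticket}
    # Passing an iterator makes requests stream the body chunk by chunk,
    # like rw_handles feeding the image iterator into the connection.
    resp = requests.put(url, data=image_chunks, headers=headers, verify=False)
    resp.raise_for_status()
    return resp.status_code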
[ 821.946325] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "187242f5-934b-4c1d-b8ac-2ce8c347351a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 822.773043] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 822.773424] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 822.773629] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Deleting the datastore file [datastore1] 187242f5-934b-4c1d-b8ac-2ce8c347351a {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 822.773841] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80b6fa3a-4c76-48ad-9fc7-f3eefcde55f3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 822.780506] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Waiting for the task: (returnval){
[ 822.780506] env[62476]: value = "task-4319057"
[ 822.780506] env[62476]: _type = "Task"
[ 822.780506] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 822.789251] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Task: {'id': task-4319057, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 823.290439] env[62476]: DEBUG oslo_vmware.api [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Task: {'id': task-4319057, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107781} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 823.290707] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 823.290978] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 823.291074] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 823.291251] env[62476]: INFO nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Took 2.72 seconds to destroy the instance on the hypervisor.
[ 823.293659] env[62476]: DEBUG nova.compute.claims [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 823.293836] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 823.294068] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 823.731687] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521a250e-5461-4c4b-bd64-7e3ae0c2c6ec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 823.739602] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db837065-c716-401d-bd63-08a4b75f2a6f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 823.769381] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b563f7e-1eb9-4fe0-bb44-d9d3ba0f1601 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 823.779111] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8141977d-b45c-49c7-836a-9f80598ffb8e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 823.791925] env[62476]: DEBUG nova.compute.provider_tree [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 823.801211] env[62476]: DEBUG nova.scheduler.client.report [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 823.818115] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.524s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 823.818706] env[62476]: ERROR nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 823.818706] env[62476]: Faults: ['InvalidArgument']
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Traceback (most recent call last):
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     self.driver.spawn(context, instance, image_meta,
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     self._fetch_image_if_missing(context, vi)
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     image_cache(vi, tmp_image_ds_loc)
[ 823.818706] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     vm_util.copy_virtual_disk(
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     session._wait_for_task(vmdk_copy_task)
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     return self.wait_for_task(task_ref)
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     return evt.wait()
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     result = hub.switch()
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     return self.greenlet.switch()
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 823.819079] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     self.f(*self.args, **self.kw)
[ 823.819436] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 823.819436] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]     raise exceptions.translate_fault(task_info.error)
[ 823.819436] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 823.819436] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Faults: ['InvalidArgument']
[ 823.819436] env[62476]: ERROR nova.compute.manager [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a]
[ 823.819588] env[62476]: DEBUG nova.compute.utils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 823.821662] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Build of instance 187242f5-934b-4c1d-b8ac-2ce8c347351a was re-scheduled: A specified parameter was not correct: fileType
[ 823.821662] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 823.821662] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 823.821662] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 823.821872] env[62476]: DEBUG nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 823.821955] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 824.231993] env[62476]: DEBUG nova.network.neutron [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 824.248011] env[62476]: INFO nova.compute.manager [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Took 0.43 seconds to deallocate network for instance.
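[Editor's note] The traceback above is the key failure in this run: CopyVirtualDisk_Task rejects the request with fault 'InvalidArgument' on fileType, _poll_task re-raises it as a VimFaultException, and Nova re-schedules the build instead of failing it outright, which is exactly what the surrounding claim-abort and re-schedule entries show. A hedged sketch of that error-translation flow; VimFaultException and its fault_list attribute are real oslo.vmware symbols, while RescheduledException and abort_claim are illustrative stand-ins:

# Editor's sketch: how the InvalidArgument fault above turns into a reschedule.
from oslo_vmware import exceptions as vexc


class RescheduledException(Exception):
    """Stand-in for Nova's reschedule signal (illustrative, not Nova's class)."""


def handle_spawn_failure(exc, abort_claim):
    # fault_list carries the vSphere fault names, e.g. ['InvalidArgument'].
    if isinstance(exc, vexc.VimFaultException) and \
            'InvalidArgument' in (exc.fault_list or []):
        abort_claim()  # mirrors "Aborting claim" under the compute_resources lock
        raise RescheduledException(str(exc)) from exc
    raise exc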
[ 824.359483] env[62476]: INFO nova.scheduler.client.report [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Deleted allocations for instance 187242f5-934b-4c1d-b8ac-2ce8c347351a
[ 824.384579] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb769d2b-15ba-4508-bcaa-f67ea9baedd7 tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.017s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 824.386741] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 2.440s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.386741] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Acquiring lock "187242f5-934b-4c1d-b8ac-2ce8c347351a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 824.386741] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.386998] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 824.388550] env[62476]: INFO nova.compute.manager [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Terminating instance
[ 824.390260] env[62476]: DEBUG nova.compute.manager [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 824.392017] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 824.392017] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84dd0003-226e-4e3f-8721-c558aeee0feb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.401862] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf9a604-c2ba-4b0f-af21-da5244070d54 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 824.412490] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 824.434382] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 187242f5-934b-4c1d-b8ac-2ce8c347351a could not be found.
[ 824.434695] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 824.434900] env[62476]: INFO nova.compute.manager [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 824.435215] env[62476]: DEBUG oslo.service.loopingcall [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 824.435436] env[62476]: DEBUG nova.compute.manager [-] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 824.435507] env[62476]: DEBUG nova.network.neutron [-] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 824.463156] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 824.463156] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 824.464494] env[62476]: INFO nova.compute.claims [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 824.468251] env[62476]: DEBUG nova.network.neutron [-] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 824.478367] env[62476]: INFO nova.compute.manager [-] [instance: 187242f5-934b-4c1d-b8ac-2ce8c347351a] Took 0.04 seconds to deallocate network for instance.
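[Editor's note] Every "Acquiring lock ... acquired ... released" triple in this stretch is emitted by oslo.concurrency's inner wrapper, which logs how long each caller waited for and held a named lock; that bookkeeping is what makes the 204.017s hold on the instance lock above easy to spot. A minimal sketch of the pattern, assuming only oslo.concurrency is installed; the function body is illustrative:

# Editor's sketch of the oslo.concurrency pattern behind the lockutils lines:
# named locks serialize critical sections, and the decorator's wrapper emits
# the Acquiring/acquired/released DEBUG lines with wait/hold timings.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Runs under the same "compute_resources" lock seen in the log above.
    print('releasing resources for %s' % instance_uuid)


# The same primitive is available as a context manager for ad-hoc scopes,
# e.g. the per-instance build/terminate lock held for 204.017s above:
with lockutils.lock('187242f5-934b-4c1d-b8ac-2ce8c347351a'):
    abort_instance_claim('187242f5-934b-4c1d-b8ac-2ce8c347351a')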
[ 824.531099] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.637838] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8cfbc3e1-ad5b-40ec-b532-23ec613abe7d tempest-ImagesOneServerTestJSON-488435541 tempest-ImagesOneServerTestJSON-488435541-project-member] Lock "187242f5-934b-4c1d-b8ac-2ce8c347351a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.252s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.965834] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b257f88-d92b-4fa2-88ac-3e87f92b93f0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.973880] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72d1321-bb0d-4f38-8976-57fd590c13fe {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.004600] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873261bf-baeb-4f05-bcd3-a12970145c90 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.012783] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2bcdc5-bb16-4125-86bf-787eeaf05936 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.027059] env[62476]: DEBUG nova.compute.provider_tree [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.035624] env[62476]: DEBUG nova.scheduler.client.report [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.054626] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.591s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.054626] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 825.106018] env[62476]: DEBUG nova.compute.utils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 825.107428] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 825.107646] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 825.118233] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 825.186776] env[62476]: DEBUG nova.policy [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a731f25b9074050b7bc1ef630c45519', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '541ebb32b6e948b190a13f4d507d0e7b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 825.194754] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 825.221274] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 825.221548] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 825.221766] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.221985] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 825.222204] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.222366] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 825.222629] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 825.222854] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 825.223076] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 825.223273] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 825.223490] env[62476]: DEBUG nova.virt.hardware [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 825.224484] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13097f6-1327-4044-be26-87a22bf4199f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.234702] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca924d7-d8fd-4c7c-8fec-a3afe5a9b07e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.668476] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Successfully created port: 6bd45ddc-c846-465f-b7d6-0339212310d9 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.949916] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Successfully updated port: 6bd45ddc-c846-465f-b7d6-0339212310d9 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.965376] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "refresh_cache-0524dc08-ac1a-4f56-b44a-adbb5a0b5038" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.965619] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquired lock "refresh_cache-0524dc08-ac1a-4f56-b44a-adbb5a0b5038" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.965862] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Building network info cache for instance {{(pid=62476) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 826.980607] env[62476]: DEBUG oslo_concurrency.lockutils [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.062373] env[62476]: DEBUG nova.compute.manager [req-eae1b299-4deb-4c6b-9709-30dfb86c1467 req-39a62ed8-50b9-4103-aba2-b1f2679f76c9 service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Received event network-vif-plugged-6bd45ddc-c846-465f-b7d6-0339212310d9 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 827.062626] env[62476]: DEBUG oslo_concurrency.lockutils [req-eae1b299-4deb-4c6b-9709-30dfb86c1467 req-39a62ed8-50b9-4103-aba2-b1f2679f76c9 service nova] Acquiring lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.062860] env[62476]: DEBUG oslo_concurrency.lockutils [req-eae1b299-4deb-4c6b-9709-30dfb86c1467 req-39a62ed8-50b9-4103-aba2-b1f2679f76c9 service nova] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.063059] env[62476]: DEBUG oslo_concurrency.lockutils [req-eae1b299-4deb-4c6b-9709-30dfb86c1467 req-39a62ed8-50b9-4103-aba2-b1f2679f76c9 service nova] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.063241] env[62476]: DEBUG nova.compute.manager [req-eae1b299-4deb-4c6b-9709-30dfb86c1467 req-39a62ed8-50b9-4103-aba2-b1f2679f76c9 service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] No waiting events found dispatching network-vif-plugged-6bd45ddc-c846-465f-b7d6-0339212310d9 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 827.063404] env[62476]: WARNING nova.compute.manager [req-eae1b299-4deb-4c6b-9709-30dfb86c1467 req-39a62ed8-50b9-4103-aba2-b1f2679f76c9 service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Received unexpected event network-vif-plugged-6bd45ddc-c846-465f-b7d6-0339212310d9 for instance with vm_state building and task_state spawning. [ 827.064683] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.488512] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Updating instance_info_cache with network_info: [{"id": "6bd45ddc-c846-465f-b7d6-0339212310d9", "address": "fa:16:3e:85:85:09", "network": {"id": "ee73eee5-ac5a-42fd-b5f8-5602514435e9", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2016612819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "541ebb32b6e948b190a13f4d507d0e7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bd45ddc-c8", "ovs_interfaceid": "6bd45ddc-c846-465f-b7d6-0339212310d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.504872] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Releasing lock "refresh_cache-0524dc08-ac1a-4f56-b44a-adbb5a0b5038" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.505172] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Instance network_info: |[{"id": "6bd45ddc-c846-465f-b7d6-0339212310d9", "address": "fa:16:3e:85:85:09", "network": {"id": "ee73eee5-ac5a-42fd-b5f8-5602514435e9", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2016612819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "541ebb32b6e948b190a13f4d507d0e7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bd45ddc-c8", "ovs_interfaceid": "6bd45ddc-c846-465f-b7d6-0339212310d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 827.505852] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:85:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '34a581cb-6d33-4e2e-af50-735a6749d6da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bd45ddc-c846-465f-b7d6-0339212310d9', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.514733] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Creating folder: Project (541ebb32b6e948b190a13f4d507d0e7b). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 827.515352] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0dfe72d8-b078-4a1a-b25b-a1f4a3b1098f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.527151] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Created folder: Project (541ebb32b6e948b190a13f4d507d0e7b) in parent group-v849485. [ 827.527365] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Creating folder: Instances. Parent ref: group-v849527. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 827.527613] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-211bd708-e94c-4ce4-aeb9-a1c0320119d3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.537475] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Created folder: Instances in parent group-v849527. [ 827.537721] env[62476]: DEBUG oslo.service.loopingcall [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.537910] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 827.538266] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43980934-766a-4b75-89a8-2c0052ad1776 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.560024] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.560024] env[62476]: value = "task-4319060" [ 827.560024] env[62476]: _type = "Task" [ 827.560024] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.567631] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319060, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.070081] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319060, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.570076] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319060, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.072394] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319060, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.204882] env[62476]: DEBUG nova.compute.manager [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Received event network-changed-6bd45ddc-c846-465f-b7d6-0339212310d9 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 829.205103] env[62476]: DEBUG nova.compute.manager [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Refreshing instance network info cache due to event network-changed-6bd45ddc-c846-465f-b7d6-0339212310d9. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 829.205322] env[62476]: DEBUG oslo_concurrency.lockutils [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] Acquiring lock "refresh_cache-0524dc08-ac1a-4f56-b44a-adbb5a0b5038" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.206038] env[62476]: DEBUG oslo_concurrency.lockutils [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] Acquired lock "refresh_cache-0524dc08-ac1a-4f56-b44a-adbb5a0b5038" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.206038] env[62476]: DEBUG nova.network.neutron [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Refreshing network info cache for port 6bd45ddc-c846-465f-b7d6-0339212310d9 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 829.573620] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319060, 'name': CreateVM_Task, 'duration_secs': 1.992088} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.573670] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 829.574351] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.574628] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.574831] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 829.575095] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d30ccd78-3616-4068-8816-c76f059aa1d4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.580184] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Waiting for the task: (returnval){ [ 829.580184] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]524df208-f363-8cfe-ec93-98655974b83b" [ 829.580184] env[62476]: _type = "Task" [ 829.580184] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.589690] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]524df208-f363-8cfe-ec93-98655974b83b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.698843] env[62476]: DEBUG nova.network.neutron [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Updated VIF entry in instance network info cache for port 6bd45ddc-c846-465f-b7d6-0339212310d9. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 829.699236] env[62476]: DEBUG nova.network.neutron [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Updating instance_info_cache with network_info: [{"id": "6bd45ddc-c846-465f-b7d6-0339212310d9", "address": "fa:16:3e:85:85:09", "network": {"id": "ee73eee5-ac5a-42fd-b5f8-5602514435e9", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2016612819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "541ebb32b6e948b190a13f4d507d0e7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bd45ddc-c8", "ovs_interfaceid": "6bd45ddc-c846-465f-b7d6-0339212310d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.718027] env[62476]: DEBUG oslo_concurrency.lockutils [req-8c0802b3-9e55-4211-9df3-a1fc4629d0ba req-980f456e-c317-49dd-b384-6e7b6359de9c service nova] Releasing lock "refresh_cache-0524dc08-ac1a-4f56-b44a-adbb5a0b5038" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.096169] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.096969] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.097209] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.938824] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.939136] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.565699] env[62476]: DEBUG oslo_concurrency.lockutils [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "760f3c9b-044d-4593-bc97-535ac09c3f3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.748647] env[62476]: DEBUG oslo_concurrency.lockutils [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "1323e67f-17c6-4432-8eea-98c285745766" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.951675] env[62476]: DEBUG oslo_concurrency.lockutils [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "e41d1a8c-ad7e-4151-9745-04318b007dfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.843685] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "a918c107-526d-4cb7-a7dd-735a7d6420a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.790567] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.456802] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.220550] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.030917] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.032032] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.032032] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 866.050817] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] There are 0 instances to clean {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 866.051084] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.052089] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances with incomplete migration {{(pid=62476) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 866.074874] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.078587] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.029168] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.029168] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.051064] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.051185] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.051360] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.051516] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 868.053105] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f834006f-16f4-4a73-8f3b-5886e4f62d44 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.066791] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba8a228-e764-4a1d-9129-23eac91e66b2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.082212] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d04b377-9b43-49b4-9ad3-f81aa1c3b0bb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.089812] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e74ce2f-c3aa-4f41-8be1-d7c17a957c9d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.121708] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180672MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 868.121871] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.122097] 
env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.228812] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.228976] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229123] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 760f3c9b-044d-4593-bc97-535ac09c3f3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229251] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1323e67f-17c6-4432-8eea-98c285745766 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229373] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229505] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e41d1a8c-ad7e-4151-9745-04318b007dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229628] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229744] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229857] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.229971] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 868.248141] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 380bc9c3-8bba-4f26-b938-e4e74543261c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.267009] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 971698f2-c127-4f21-ae3f-3bb863742982 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.281204] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.297577] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 327a282e-b502-4644-a152-0e77ec399fe7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.315359] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4770186f-8bd4-455c-a21d-f79e2230fa4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
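The _remove_deleted_instances_allocations entries above and below show the resource tracker reconciling Placement allocations against the host's view: allocations for actively managed instances are kept, allocations for instances scheduled here but not yet started are skipped, and anything else would be removed as stale. A plain-dict sketch of that decision (UUIDs abbreviated, sets made up; Nova does this against the Placement API, not dicts):

# allocations reported by Placement for this compute node
allocations = {
    "1f9496b6": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},   # running here
    "380bc9c3": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},   # scheduled here
    "deadbeef": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},   # stale (made up)
}
managed = {"1f9496b6"}      # instances this host actively manages
scheduled = {"380bc9c3"}    # claimed by the scheduler, not yet started

for uuid, resources in allocations.items():
    if uuid in managed:
        print(f"{uuid} actively managed, allocation kept: {resources}")
    elif uuid in scheduled:
        print(f"{uuid} scheduled but not started, skipping heal")
    else:
        print(f"{uuid} unknown here, allocation would be deleted")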
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.328435] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c60488a7-7d3b-49af-8b4d-9aad718a37a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.342730] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 02f37f91-5ee0-46bb-a5e2-ec8256c1f22c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.359117] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9ea6880c-469b-4c66-927e-442a41e22163 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.379950] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7c661fb6-abb7-486b-9188-f8d4dd6bb1a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.403058] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7620aead-4244-47ac-be0a-6614d03ec2c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.414068] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8ce40d44-062c-47cf-be36-d8ed6d924094 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.428967] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a2008090-914b-448c-8c60-776d4032e091 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.441795] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.457541] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.471264] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f808bd9-50bd-474a-9bcd-3ff9cacd86dc tempest-ServersTestJSON-2046354372 tempest-ServersTestJSON-2046354372-project-member] Acquiring lock "5169fc76-eb51-45f0-9f19-737fb3213125" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.471560] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f808bd9-50bd-474a-9bcd-3ff9cacd86dc tempest-ServersTestJSON-2046354372 tempest-ServersTestJSON-2046354372-project-member] Lock "5169fc76-eb51-45f0-9f19-737fb3213125" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.474999] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4a9416ca-21ad-42eb-9ffd-a0009d6d96a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.504042] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
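The "Total usable vcpus" and "Final resource view" entries just below are straightforward accounting over these allocations: ten tracked instances each holding 1 vCPU, 128 MB of RAM and 1 GB of disk, plus the 512 MB the inventory reports as reserved, which the tracker counts as used. Worked out with the values from the log:

instances = 10                       # the actively managed list above
used_vcpus = instances * 1           # -> 10 of 48 total
used_ram   = 512 + instances * 128   # reserved + claims -> 1792 MB
used_disk  = instances * 1           # -> 10 GB
print(used_vcpus, used_ram, used_disk)   # 10 1792 10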
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 868.504302] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 868.504441] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 868.804946] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d62f1e0-dce6-49b6-86c5-d1c1cebe383f tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] Acquiring lock "31afc3bf-67c8-481a-9413-e69b5d6bf74f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.805172] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d62f1e0-dce6-49b6-86c5-d1c1cebe383f tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] Lock "31afc3bf-67c8-481a-9413-e69b5d6bf74f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.957097] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cd3e6f-4b0b-4da7-a36f-01a97a1ddd1a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.965846] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a25d6b-7a69-45dd-a90a-c23c3a7248bc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.002697] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3779a7b3-c4d2-4761-a2d6-7dbaf1ee1d2b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.011197] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e1aefd-5882-4878-baf1-9d74890c54be {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.025999] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.036018] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 869.054483] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 869.054639] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.933s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.660073] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e47f7d9b-0846-4022-b0d4-f184daa30b13 tempest-ServerActionsTestOtherA-421018234 tempest-ServerActionsTestOtherA-421018234-project-member] Acquiring lock "92e0c9fd-582c-4118-b7e0-0fb822b1c38e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.660453] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e47f7d9b-0846-4022-b0d4-f184daa30b13 tempest-ServerActionsTestOtherA-421018234 tempest-ServerActionsTestOtherA-421018234-project-member] Lock "92e0c9fd-582c-4118-b7e0-0fb822b1c38e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.831197] env[62476]: WARNING oslo_vmware.rw_handles [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 869.831197] env[62476]: ERROR oslo_vmware.rw_handles [ 869.831648] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 
1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 869.833535] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 869.833790] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Copying Virtual Disk [datastore1] vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/72bd0572-a8a4-49a0-90b2-b27072a7f050/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 869.840018] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe3bd1a1-038a-4f8c-b806-35559841a168 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.847195] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Waiting for the task: (returnval){ [ 869.847195] env[62476]: value = "task-4319061" [ 869.847195] env[62476]: _type = "Task" [ 869.847195] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.859792] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Task: {'id': task-4319061, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.053561] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.053561] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 870.053561] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 870.074483] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Skipping network cache update for instance because it is Building. 
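The image-cache entries above (download to tmp-sparse.vmdk, then "Caching image" via a CopyVirtualDisk into devstack-image-cache_base) follow a fetch-if-missing shape. A local-filesystem sketch of that flow; the helper names are stand-ins, and the real driver works with datastore paths and vCenter tasks rather than shutil:

import os
import shutil
import tempfile

def fetch_image_if_missing(cache_dir, image_id, download):
    cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    if os.path.exists(cached):
        return cached                       # cache hit: reuse directly
    os.makedirs(os.path.dirname(cached), exist_ok=True)
    tmp = os.path.join(tempfile.mkdtemp(), "tmp-sparse.vmdk")
    download(tmp)                           # "Downloading image file data ..."
    shutil.copy(tmp, cached)                # the "Copying Virtual Disk" step
    return cached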
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.074656] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.074914] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.074914] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.075065] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.075144] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.075261] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.075381] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.075497] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.075612] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 870.075839] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
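The periodic-task entries in this stretch (_heal_instance_info_cache, _poll_rebooting_instances, _reclaim_queued_deletes, and so on) are scheduled by oslo.service. A minimal sketch of how such a task is declared, with the heal body reduced to the skip-if-Building check visible above; the spacing value and the two helper methods are illustrative stand-ins:

from oslo_service import periodic_task

class ComputeManagerSketch(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        for inst in self.host_instances(context):
            if inst["vm_state"] == "building":
                # matches "Skipping network cache update ... Building"
                continue
            self.refresh_cache(inst)

    def host_instances(self, context):
        return []     # stand-in; Nova queries its database here

    def refresh_cache(self, inst):
        pass          # stand-in; Nova re-queries Neutron here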
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 870.076233] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.076423] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.076659] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 870.360292] env[62476]: DEBUG oslo_vmware.exceptions [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 870.360366] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.360950] env[62476]: ERROR nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 870.360950] env[62476]: Faults: ['InvalidArgument'] [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Traceback (most recent call last): [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] yield resources [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] self.driver.spawn(context, instance, image_meta, [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] 
self._fetch_image_if_missing(context, vi) [ 870.360950] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] image_cache(vi, tmp_image_ds_loc) [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] vm_util.copy_virtual_disk( [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] session._wait_for_task(vmdk_copy_task) [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] return self.wait_for_task(task_ref) [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] return evt.wait() [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] result = hub.switch() [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 870.361277] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] return self.greenlet.switch() [ 870.365161] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 870.365161] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] self.f(*self.args, **self.kw) [ 870.365161] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 870.365161] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] raise exceptions.translate_fault(task_info.error) [ 870.365161] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 870.365161] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Faults: ['InvalidArgument'] [ 870.365161] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] [ 870.365161] env[62476]: INFO nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff 
tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Terminating instance [ 870.365161] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.365474] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.365474] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 870.365474] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 870.365474] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bffa3366-902c-4e6f-945a-7b54fd6e87fb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.367043] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bc1c7f-b8ee-4444-90f9-b8f2db4a3fe0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.374853] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 870.376023] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aea403f5-eb01-4527-94a6-ebd24816d2e7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.377585] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.377761] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Folder [datastore1] devstack-image-cache_base created. 
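The mkdir entries above ("Creating directory ... / Created directory / Folder ... created") are an idempotent directory creation on the datastore, so concurrent spawns racing to set up the image cache do not fail. A stand-in sketch; session.make_directory and the exception type are hypothetical placeholders for the vCenter FileManager.MakeDirectory call:

class FileAlreadyExists(Exception):
    """Placeholder for the vim fault raised on a pre-existing path."""

def create_folder_if_missing(session, path):
    try:
        session.make_directory(path, create_parents=True)
        print(f"Folder {path} created.")
    except FileAlreadyExists:
        pass    # another request created it first; that is fine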
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 870.378908] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34013bf9-d0c0-4459-bd9e-45cfcc0ff879 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.384646] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 870.384646] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52fa06dd-8ae4-1ea3-5aef-32401715d3de" [ 870.384646] env[62476]: _type = "Task" [ 870.384646] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.393481] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52fa06dd-8ae4-1ea3-5aef-32401715d3de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.452695] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 870.452869] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 870.453114] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Deleting the datastore file [datastore1] 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.453354] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-716a2dda-9251-4fb9-9769-1d0c66c92eb1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.460869] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Waiting for the task: (returnval){ [ 870.460869] env[62476]: value = "task-4319063" [ 870.460869] env[62476]: _type = "Task" [ 870.460869] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.472533] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Task: {'id': task-4319063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
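Every "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triple in this log (SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is the same polling loop in oslo.vmware. A simplified stand-in; get_task_info is a hypothetical accessor, and the real loop reads TaskInfo through a PropertyCollector and translates error faults into VimFaultException:

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info.result              # "... completed successfully."
        if info.state == "error":
            raise RuntimeError(info.error)  # fault translation happens here
        # 'queued' or 'running': report progress and poll again
        print(f"Task {task_ref}: progress is {info.progress}%")
        time.sleep(poll_interval)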
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.902137] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 870.902490] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating directory with path [datastore1] vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.902761] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b02c15f-7df3-422a-b16c-e77b398a8bf9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.932839] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created directory with path [datastore1] vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.933077] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Fetch image to [datastore1] vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 870.933263] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 870.934125] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5891eeb-351f-41e3-b92e-81e0bcc3e061 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.946027] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e117d2-cb6b-43a0-aa18-8f2c48bff92b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.955483] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ba223e-21c7-4e5e-9d5c-2bfaaef5e3a4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.995026] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1924add-4e5d-46d9-a331-49eb61ab5ffc {{(pid=62476) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.000590] env[62476]: DEBUG oslo_vmware.api [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Task: {'id': task-4319063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097919} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.001231] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 871.001426] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 871.001600] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 871.002145] env[62476]: INFO nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Took 0.64 seconds to destroy the instance on the hypervisor. 
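With the instance destroyed, the claim taken for it during build is rolled back (entries below): abort_instance_claim reverses the usage bookkeeping while holding the same "compute_resources" lock the tracker uses for every update. A pure-Python sketch of that bookkeeping using the real lockutils decorator; the resource math is the illustrative part:

from oslo_concurrency import lockutils

class TrackerSketch:
    def __init__(self):
        self.used = {"VCPU": 10, "MEMORY_MB": 1792, "DISK_GB": 10}

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(self, claim):
        for rc, amount in claim.items():
            self.used[rc] -= amount    # return resources to the free pool

tracker = TrackerSketch()
tracker.abort_instance_claim({"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1})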
[ 871.006427] env[62476]: DEBUG nova.compute.claims [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 871.006483] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.006700] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.013074] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75152471-1110-4fa0-aed7-9f0ab9f063e8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.027819] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 871.037024] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 871.116463] env[62476]: DEBUG oslo_vmware.rw_handles [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 871.187878] env[62476]: DEBUG oslo_vmware.rw_handles [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 871.187878] env[62476]: DEBUG oslo_vmware.rw_handles [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
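The rw_handles entries above ("Creating HTTP connection to write to file with size = 21318656 ...", then closing the write handle) amount to streaming the image bytes to the datastore folder URL over HTTPS. The bare shape of that, without the ticket auth and chunked transfer oslo.vmware adds; any URL passed in is a placeholder:

import http.client
from urllib.parse import urlparse

def write_image(url, data: bytes):
    parsed = urlparse(url)
    target = parsed.path + ("?" + parsed.query if parsed.query else "")
    conn = http.client.HTTPSConnection(parsed.netloc)
    conn.request("PUT", target, body=data,
                 headers={"Content-Length": str(len(data))})
    status = conn.getresponse().status   # the call that raised
    conn.close()                         # RemoteDisconnected earlier in the log
    return status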
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 871.203625] env[62476]: DEBUG nova.scheduler.client.report [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Refreshing inventories for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 871.231179] env[62476]: DEBUG nova.scheduler.client.report [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Updating ProviderTree inventory for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 871.231179] env[62476]: DEBUG nova.compute.provider_tree [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.253512] env[62476]: DEBUG nova.scheduler.client.report [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Refreshing aggregate associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, aggregates: None {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 871.280648] env[62476]: DEBUG nova.scheduler.client.report [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Refreshing trait associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 871.782952] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adaf074c-9111-478b-a108-b6a601b75456 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.796513] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e551176-7116-454d-b187-a5bc45be88ad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.831703] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083cd6d4-7529-4633-9cc9-7b42f68016cf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.840263] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90077821-5e67-4f4b-880a-88d8f6c5839a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.855271] env[62476]: DEBUG nova.compute.provider_tree [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.868093] env[62476]: DEBUG nova.scheduler.client.report [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.888010] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.881s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.888589] env[62476]: ERROR nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 871.888589] env[62476]: Faults: ['InvalidArgument'] [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Traceback (most recent call last): [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] self.driver.spawn(context, instance, image_meta, [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 
1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] self._fetch_image_if_missing(context, vi) [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] image_cache(vi, tmp_image_ds_loc) [ 871.888589] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] vm_util.copy_virtual_disk( [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] session._wait_for_task(vmdk_copy_task) [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] return self.wait_for_task(task_ref) [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] return evt.wait() [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] result = hub.switch() [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] return self.greenlet.switch() [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 871.888991] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] self.f(*self.args, **self.kw) [ 871.889473] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 871.889473] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] raise exceptions.translate_fault(task_info.error) [ 871.889473] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 871.889473] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Faults: ['InvalidArgument'] [ 871.889473] env[62476]: ERROR nova.compute.manager [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] [ 871.889473] env[62476]: DEBUG nova.compute.utils [None 
req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 871.893152] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Build of instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 was re-scheduled: A specified parameter was not correct: fileType [ 871.893152] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 871.893152] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 871.893152] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 871.893343] env[62476]: DEBUG nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 871.893449] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 872.029767] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.614017] env[62476]: DEBUG nova.network.neutron [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.626772] env[62476]: INFO nova.compute.manager [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Took 0.73 seconds to deallocate network for instance. 
[ 872.786982] env[62476]: INFO nova.scheduler.client.report [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Deleted allocations for instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 [ 872.821948] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c5beff2-0bc1-4e2d-ab55-6723201ed5ff tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 249.481s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.823810] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 48.293s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.827616] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Acquiring lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.827616] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.827616] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.830033] env[62476]: INFO nova.compute.manager [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Terminating instance [ 872.833339] env[62476]: DEBUG nova.compute.manager [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 872.834937] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 872.835045] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29f89cf8-4518-4d3f-b0c6-b0af8b66dfe5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.849110] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7ab4d5-cdc3-4e40-a42c-0576bd90603d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.863777] env[62476]: DEBUG nova.compute.manager [None req-d0fa9ce0-5864-4e3c-af72-8e3c8bc89cb0 tempest-ServersTestFqdnHostnames-1462908703 tempest-ServersTestFqdnHostnames-1462908703-project-member] [instance: a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 872.887958] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1 could not be found. [ 872.888382] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 872.888694] env[62476]: INFO nova.compute.manager [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Took 0.06 seconds to destroy the instance on the hypervisor. [ 872.889038] env[62476]: DEBUG oslo.service.loopingcall [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 872.889350] env[62476]: DEBUG nova.compute.manager [-] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 872.889502] env[62476]: DEBUG nova.network.neutron [-] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 872.904211] env[62476]: DEBUG nova.compute.manager [None req-d0fa9ce0-5864-4e3c-af72-8e3c8bc89cb0 tempest-ServersTestFqdnHostnames-1462908703 tempest-ServersTestFqdnHostnames-1462908703-project-member] [instance: a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 872.936786] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d0fa9ce0-5864-4e3c-af72-8e3c8bc89cb0 tempest-ServersTestFqdnHostnames-1462908703 tempest-ServersTestFqdnHostnames-1462908703-project-member] Lock "a9eb98c9-73cd-453f-9d6f-f6fe5e0a6c0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.916s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.947220] env[62476]: DEBUG nova.compute.manager [None req-78244f68-4174-4661-bf27-e91b3e28ad11 tempest-FloatingIPsAssociationTestJSON-748075487 tempest-FloatingIPsAssociationTestJSON-748075487-project-member] [instance: d271fa1d-d7f3-4abf-9b5f-69396c4c128c] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 872.955905] env[62476]: DEBUG nova.network.neutron [-] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.966641] env[62476]: INFO nova.compute.manager [-] [instance: 1f9496b6-ff55-473d-8b82-d1e4e3afe0f1] Took 0.08 seconds to deallocate network for instance. [ 872.992154] env[62476]: DEBUG nova.compute.manager [None req-78244f68-4174-4661-bf27-e91b3e28ad11 tempest-FloatingIPsAssociationTestJSON-748075487 tempest-FloatingIPsAssociationTestJSON-748075487-project-member] [instance: d271fa1d-d7f3-4abf-9b5f-69396c4c128c] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 873.021149] env[62476]: DEBUG oslo_concurrency.lockutils [None req-78244f68-4174-4661-bf27-e91b3e28ad11 tempest-FloatingIPsAssociationTestJSON-748075487 tempest-FloatingIPsAssociationTestJSON-748075487-project-member] Lock "d271fa1d-d7f3-4abf-9b5f-69396c4c128c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.513s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.039024] env[62476]: DEBUG nova.compute.manager [None req-bc6e10b8-37c6-411b-90ff-f54796f5d48b tempest-ServerDiagnosticsTest-736104627 tempest-ServerDiagnosticsTest-736104627-project-member] [instance: a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 873.075618] env[62476]: DEBUG nova.compute.manager [None req-bc6e10b8-37c6-411b-90ff-f54796f5d48b tempest-ServerDiagnosticsTest-736104627 tempest-ServerDiagnosticsTest-736104627-project-member] [instance: a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 873.110431] env[62476]: DEBUG oslo_concurrency.lockutils [None req-bc6e10b8-37c6-411b-90ff-f54796f5d48b tempest-ServerDiagnosticsTest-736104627 tempest-ServerDiagnosticsTest-736104627-project-member] Lock "a0b1a835-21b9-4d9d-a5d7-35479a8c4b6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.015s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.126872] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e805632e-e8f8-4d4f-81ea-272903c7345b tempest-ServerAddressesTestJSON-1076321944 tempest-ServerAddressesTestJSON-1076321944-project-member] Lock "1f9496b6-ff55-473d-8b82-d1e4e3afe0f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.303s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.166386] env[62476]: DEBUG nova.compute.manager [None req-4faee79d-d499-4240-8183-c3c0b58a49dc tempest-ServerActionsTestJSON-1910103505 tempest-ServerActionsTestJSON-1910103505-project-member] [instance: da5c9742-9dba-4691-9bda-25858915857b] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 873.215371] env[62476]: DEBUG nova.compute.manager [None req-4faee79d-d499-4240-8183-c3c0b58a49dc tempest-ServerActionsTestJSON-1910103505 tempest-ServerActionsTestJSON-1910103505-project-member] [instance: da5c9742-9dba-4691-9bda-25858915857b] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 873.248397] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4faee79d-d499-4240-8183-c3c0b58a49dc tempest-ServerActionsTestJSON-1910103505 tempest-ServerActionsTestJSON-1910103505-project-member] Lock "da5c9742-9dba-4691-9bda-25858915857b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.689s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.262290] env[62476]: DEBUG nova.compute.manager [None req-2c6ec62f-f8c2-427a-b0fd-53ec2fe9e897 tempest-ServersWithSpecificFlavorTestJSON-1581171592 tempest-ServersWithSpecificFlavorTestJSON-1581171592-project-member] [instance: 3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 873.292750] env[62476]: DEBUG nova.compute.manager [None req-2c6ec62f-f8c2-427a-b0fd-53ec2fe9e897 tempest-ServersWithSpecificFlavorTestJSON-1581171592 tempest-ServersWithSpecificFlavorTestJSON-1581171592-project-member] [instance: 3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 873.325567] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2c6ec62f-f8c2-427a-b0fd-53ec2fe9e897 tempest-ServersWithSpecificFlavorTestJSON-1581171592 tempest-ServersWithSpecificFlavorTestJSON-1581171592-project-member] Lock "3d2bc5b9-cef1-4c45-a493-00a89bd2dfcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.111s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.338563] env[62476]: DEBUG nova.compute.manager [None req-69cc9f62-bbf1-4f34-856f-a41bdb4f078e tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] [instance: ab7da5f4-9460-4d70-a0e5-5a690284d0e0] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 873.364789] env[62476]: DEBUG nova.compute.manager [None req-69cc9f62-bbf1-4f34-856f-a41bdb4f078e tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] [instance: ab7da5f4-9460-4d70-a0e5-5a690284d0e0] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 873.394189] env[62476]: DEBUG oslo_concurrency.lockutils [None req-69cc9f62-bbf1-4f34-856f-a41bdb4f078e tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] Lock "ab7da5f4-9460-4d70-a0e5-5a690284d0e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.152s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.406326] env[62476]: DEBUG nova.compute.manager [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: 380bc9c3-8bba-4f26-b938-e4e74543261c] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 873.435403] env[62476]: DEBUG nova.compute.manager [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: 380bc9c3-8bba-4f26-b938-e4e74543261c] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 873.460689] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "380bc9c3-8bba-4f26-b938-e4e74543261c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.772s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.471250] env[62476]: DEBUG nova.compute.manager [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: 971698f2-c127-4f21-ae3f-3bb863742982] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 873.502659] env[62476]: DEBUG nova.compute.manager [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: 971698f2-c127-4f21-ae3f-3bb863742982] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 873.527741] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ae344a3a-18e6-4e18-a5f0-9a20e91179db tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "971698f2-c127-4f21-ae3f-3bb863742982" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.812s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.542196] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 873.621681] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.621950] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.624776] env[62476]: INFO nova.compute.claims [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 874.153768] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4130cb-64e5-46a4-b650-a927a71d0ef0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.163732] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733f6abf-eedb-49d6-96ab-e98bd7bbe6c6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.172142] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "eca46087-33a7-4e9d-a7ce-6094886704a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.172439] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 
tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.203669] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88257b1-45b6-4dad-a8da-d335ee8dbd3d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.212811] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6e332e-b07e-4f35-8dee-2cc08e025e99 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.230534] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "3295b0cb-15d5-4008-bc76-95b69f2f40a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.230795] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "3295b0cb-15d5-4008-bc76-95b69f2f40a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.231216] env[62476]: DEBUG nova.compute.provider_tree [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.241720] env[62476]: DEBUG nova.scheduler.client.report [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 874.264571] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.642s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.265135] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 
0561164b-f3f9-446f-b597-4b6d16a32a00] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 874.316708] env[62476]: DEBUG nova.compute.utils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.318129] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 874.318534] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 874.335060] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 874.443671] env[62476]: DEBUG nova.policy [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9e4673294b1477d93bdae5dfef42927', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16d034f4180f4aeaa8f880c3e6767730', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 874.447543] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 874.484525] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 874.484525] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 874.484525] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.484693] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 874.484989] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.484989] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 874.488030] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 874.488030] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 874.488030] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] 
Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 874.488030] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 874.488030] env[62476]: DEBUG nova.virt.hardware [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 874.489606] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cd225a-f9c5-463a-b535-40ae5533f7ce {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.504306] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7563040e-d8e5-409f-b09a-023d4b51383c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.072343] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3b7952cc-98c8-414e-8011-f1089c43829d tempest-ServerPasswordTestJSON-1745514762 tempest-ServerPasswordTestJSON-1745514762-project-member] Acquiring lock "7aab03db-43b4-4884-bc20-0a29058ea2ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.072858] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3b7952cc-98c8-414e-8011-f1089c43829d tempest-ServerPasswordTestJSON-1745514762 tempest-ServerPasswordTestJSON-1745514762-project-member] Lock "7aab03db-43b4-4884-bc20-0a29058ea2ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.093615] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Successfully created port: 5944cdb0-c2fb-4637-9775-1115e2ddfd0f {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.288797] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Successfully updated port: 5944cdb0-c2fb-4637-9775-1115e2ddfd0f {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.302025] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.302164] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 
tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.302266] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 876.375493] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 876.534665] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "0561164b-f3f9-446f-b597-4b6d16a32a00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.622160] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Updating instance_info_cache with network_info: [{"id": "5944cdb0-c2fb-4637-9775-1115e2ddfd0f", "address": "fa:16:3e:81:fb:e2", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5944cdb0-c2", "ovs_interfaceid": "5944cdb0-c2fb-4637-9775-1115e2ddfd0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.633431] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.633890] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance network_info: |[{"id": 
"5944cdb0-c2fb-4637-9775-1115e2ddfd0f", "address": "fa:16:3e:81:fb:e2", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5944cdb0-c2", "ovs_interfaceid": "5944cdb0-c2fb-4637-9775-1115e2ddfd0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.634765] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:fb:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5944cdb0-c2fb-4637-9775-1115e2ddfd0f', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.643234] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating folder: Project (16d034f4180f4aeaa8f880c3e6767730). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 876.643613] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f9d1809-b07d-492e-a643-6ccd7685975c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.656163] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created folder: Project (16d034f4180f4aeaa8f880c3e6767730) in parent group-v849485. [ 876.656376] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating folder: Instances. Parent ref: group-v849530. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 876.656669] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97e36958-0247-4b8e-b338-abc2d3dc18b6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.666693] env[62476]: DEBUG nova.compute.manager [req-3251afb5-27bb-45ee-9223-bd75537bb6ce req-b5069040-afe3-48c9-abc4-e2565c57f4ab service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Received event network-vif-plugged-5944cdb0-c2fb-4637-9775-1115e2ddfd0f {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 876.666693] env[62476]: DEBUG oslo_concurrency.lockutils [req-3251afb5-27bb-45ee-9223-bd75537bb6ce req-b5069040-afe3-48c9-abc4-e2565c57f4ab service nova] Acquiring lock "0561164b-f3f9-446f-b597-4b6d16a32a00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.666693] env[62476]: DEBUG oslo_concurrency.lockutils [req-3251afb5-27bb-45ee-9223-bd75537bb6ce req-b5069040-afe3-48c9-abc4-e2565c57f4ab service nova] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.666693] env[62476]: DEBUG oslo_concurrency.lockutils [req-3251afb5-27bb-45ee-9223-bd75537bb6ce req-b5069040-afe3-48c9-abc4-e2565c57f4ab service nova] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.667092] env[62476]: DEBUG nova.compute.manager [req-3251afb5-27bb-45ee-9223-bd75537bb6ce req-b5069040-afe3-48c9-abc4-e2565c57f4ab service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] No waiting events found dispatching network-vif-plugged-5944cdb0-c2fb-4637-9775-1115e2ddfd0f {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 876.667092] env[62476]: WARNING nova.compute.manager [req-3251afb5-27bb-45ee-9223-bd75537bb6ce req-b5069040-afe3-48c9-abc4-e2565c57f4ab service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Received unexpected event network-vif-plugged-5944cdb0-c2fb-4637-9775-1115e2ddfd0f for instance with vm_state building and task_state deleting. [ 876.668933] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created folder: Instances in parent group-v849530. [ 876.669201] env[62476]: DEBUG oslo.service.loopingcall [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.669522] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 876.669740] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89f2c6e9-41f7-46f7-9d21-0a8e2c7b3b55 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.690863] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.690863] env[62476]: value = "task-4319066" [ 876.690863] env[62476]: _type = "Task" [ 876.690863] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.699020] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319066, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.201163] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319066, 'name': CreateVM_Task, 'duration_secs': 0.323181} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.201376] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 877.202098] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.202300] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.202624] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.202883] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bc7d791-f9f8-4004-ae19-b85b12cd7ce8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.208592] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 877.208592] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]529603fe-7382-a116-94cf-b3d725f3f3f2" [ 877.208592] env[62476]: _type = "Task" [ 877.208592] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.217792] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]529603fe-7382-a116-94cf-b3d725f3f3f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.728019] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.728019] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.728470] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.579761] env[62476]: DEBUG nova.compute.manager [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Received event network-changed-5944cdb0-c2fb-4637-9775-1115e2ddfd0f {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 879.580047] env[62476]: DEBUG nova.compute.manager [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Refreshing instance network info cache due to event network-changed-5944cdb0-c2fb-4637-9775-1115e2ddfd0f. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 879.581317] env[62476]: DEBUG oslo_concurrency.lockutils [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] Acquiring lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.581317] env[62476]: DEBUG oslo_concurrency.lockutils [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] Acquired lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.581317] env[62476]: DEBUG nova.network.neutron [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Refreshing network info cache for port 5944cdb0-c2fb-4637-9775-1115e2ddfd0f {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 879.994344] env[62476]: DEBUG nova.network.neutron [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Updated VIF entry in instance network info cache for port 5944cdb0-c2fb-4637-9775-1115e2ddfd0f. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 879.995373] env[62476]: DEBUG nova.network.neutron [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Updating instance_info_cache with network_info: [{"id": "5944cdb0-c2fb-4637-9775-1115e2ddfd0f", "address": "fa:16:3e:81:fb:e2", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5944cdb0-c2", "ovs_interfaceid": "5944cdb0-c2fb-4637-9775-1115e2ddfd0f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.008886] env[62476]: DEBUG oslo_concurrency.lockutils [req-c6f9aa6c-5faf-48b8-88a9-a28702818376 req-20bf8b33-27d6-40d3-95d3-e0afc490cde3 service nova] Releasing lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.549174] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f6122ed3-43ce-497f-9465-9e2f2a90d52e tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] Acquiring lock 
"ade4daab-4b02-4664-b745-a0c799d8415d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.549556] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f6122ed3-43ce-497f-9465-9e2f2a90d52e tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] Lock "ade4daab-4b02-4664-b745-a0c799d8415d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.001215] env[62476]: DEBUG oslo_concurrency.lockutils [None req-59b74bcb-bd32-4100-84b7-3950a1100cff tempest-ServersNegativeTestJSON-2144930018 tempest-ServersNegativeTestJSON-2144930018-project-member] Acquiring lock "cd2482db-1c9e-4b1a-bb79-b7250cb863a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.001215] env[62476]: DEBUG oslo_concurrency.lockutils [None req-59b74bcb-bd32-4100-84b7-3950a1100cff tempest-ServersNegativeTestJSON-2144930018 tempest-ServersNegativeTestJSON-2144930018-project-member] Lock "cd2482db-1c9e-4b1a-bb79-b7250cb863a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.841200] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cd41f60f-7b50-4fad-bdf0-cdc7e1389792 tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] Acquiring lock "2c553a0b-1f8b-42aa-8b64-e22bc3cac45e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.841458] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cd41f60f-7b50-4fad-bdf0-cdc7e1389792 tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] Lock "2c553a0b-1f8b-42aa-8b64-e22bc3cac45e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.838781] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab8492c9-a05b-4c8c-aa92-779d2fd554a1 tempest-InstanceActionsTestJSON-845501273 tempest-InstanceActionsTestJSON-845501273-project-member] Acquiring lock "02b24610-323b-47b0-9c3e-f397cb48835e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.839186] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab8492c9-a05b-4c8c-aa92-779d2fd554a1 tempest-InstanceActionsTestJSON-845501273 tempest-InstanceActionsTestJSON-845501273-project-member] Lock "02b24610-323b-47b0-9c3e-f397cb48835e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 906.255364] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6ccdac5e-dac4-4774-91df-45313dc42124 tempest-ServerGroupTestJSON-560967850 tempest-ServerGroupTestJSON-560967850-project-member] Acquiring lock "4439f302-8fa4-452a-97d8-4d6c1fef36d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 906.255364] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6ccdac5e-dac4-4774-91df-45313dc42124 tempest-ServerGroupTestJSON-560967850 tempest-ServerGroupTestJSON-560967850-project-member] Lock "4439f302-8fa4-452a-97d8-4d6c1fef36d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 912.742704] env[62476]: DEBUG oslo_concurrency.lockutils [None req-acfdfcd5-59f8-4392-b9e2-8d61199ba69d tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Acquiring lock "1241b06a-696d-4f96-961e-95129b1ba674" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 912.743081] env[62476]: DEBUG oslo_concurrency.lockutils [None req-acfdfcd5-59f8-4392-b9e2-8d61199ba69d tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Lock "1241b06a-696d-4f96-961e-95129b1ba674" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 917.560377] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5e11bcea-2717-4d76-82e2-bfcbcdcca495 tempest-ServerShowV257Test-510154599 tempest-ServerShowV257Test-510154599-project-member] Acquiring lock "4be38fcc-5fa9-43b3-ab33-544812082b2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 917.560651] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5e11bcea-2717-4d76-82e2-bfcbcdcca495 tempest-ServerShowV257Test-510154599 tempest-ServerShowV257Test-510154599-project-member] Lock "4be38fcc-5fa9-43b3-ab33-544812082b2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 917.812767] env[62476]: WARNING oslo_vmware.rw_handles [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles response.begin()
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 917.812767] env[62476]: ERROR oslo_vmware.rw_handles
[ 917.813266] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 917.815146] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 917.815420] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Copying Virtual Disk [datastore1] vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/07c92e2c-0cbf-4a03-91ff-8f2a0a8d02a4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 917.815713] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28b9b7c4-35b2-4a12-b05e-0968a0f32ebc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 917.825151] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){
[ 917.825151] env[62476]: value = "task-4319067"
[ 917.825151] env[62476]: _type = "Task"
[ 917.825151] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 917.839475] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 918.337051] env[62476]: DEBUG oslo_vmware.exceptions [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 918.337051] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 918.337436] env[62476]: ERROR nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 918.337436] env[62476]: Faults: ['InvalidArgument']
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Traceback (most recent call last):
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] yield resources
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self.driver.spawn(context, instance, image_meta,
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self._fetch_image_if_missing(context, vi)
[ 918.337436] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] image_cache(vi, tmp_image_ds_loc)
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] vm_util.copy_virtual_disk(
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] session._wait_for_task(vmdk_copy_task)
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] return self.wait_for_task(task_ref)
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] return evt.wait()
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] result = hub.switch()
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 918.337929] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] return self.greenlet.switch()
[ 918.338364] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 918.338364] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self.f(*self.args, **self.kw)
[ 918.338364] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 918.338364] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] raise exceptions.translate_fault(task_info.error)
[ 918.338364] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 918.338364] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Faults: ['InvalidArgument']
[ 918.338364] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1]
[ 918.338364] env[62476]: INFO nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Terminating instance
[ 918.339544] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 918.339761] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 918.340041] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93fc2459-409c-4d26-83ef-82a2a1537681 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 918.342546] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 918.342731] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 918.343562] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2701245b-dc5f-4bb4-af18-8dea04b15da2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 918.352855] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 918.354645] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-648d3585-4342-49ef-a3bc-b3f8a1ddde62 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 918.357052] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 918.357347] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 918.358349] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc8f05d8-d81e-4217-8dbd-af919dbac3bf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 918.366406] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Waiting for the task: (returnval){
[ 918.366406] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]524b116e-45fa-0fba-8ec7-572b49f1577f"
[ 918.366406] env[62476]: _type = "Task"
[ 918.366406] env[62476]: } to complete.
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.381825] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 918.382120] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Creating directory with path [datastore1] vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.382382] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d42581e5-354c-4ce1-a27e-d8f5a87b7050 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.397218] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Created directory with path [datastore1] vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.397502] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Fetch image to [datastore1] vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 918.397767] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 918.398635] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb69139-182e-4693-94df-bad3df10a02d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.407615] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0659fde4-9923-4ff6-a320-5a28108e9ca4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.420338] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd792499-5202-4e05-aa03-e44606ac05a0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.454992] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ab02a57b-ceb6-4290-9c2d-fc5c8ca09fed {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 918.457855] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 918.458034] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 918.458225] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleting the datastore file [datastore1] 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 918.458573] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc3efb2f-2a9f-418b-b67d-3d77f4de34eb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 918.465817] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-87a50cf5-c8c1-4748-acc4-9ce5369e5e47 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 918.467921] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){
[ 918.467921] env[62476]: value = "task-4319069"
[ 918.467921] env[62476]: _type = "Task"
[ 918.467921] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 918.477123] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 918.499799] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 918.643590] env[62476]: DEBUG oslo_vmware.rw_handles [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1.
{{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 918.704759] env[62476]: DEBUG oslo_vmware.rw_handles [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 918.705023] env[62476]: DEBUG oslo_vmware.rw_handles [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 918.984476] env[62476]: DEBUG oslo_vmware.api [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081867} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.984934] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.985259] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 918.985552] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 918.985854] env[62476]: INFO nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Took 0.64 seconds to destroy the instance on the hypervisor. 
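The CreateVM_Task, CopyVirtualDisk_Task, and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: invoke_api() issues the SOAP call (logged as "Invoking <ManagedObject>.<Method>"), a *_Task method returns a Task reference immediately, and wait_for_task() polls it (the "progress is 0%" / "completed successfully" lines from api.py:434/444) until it finishes or raises the translated vSphere fault. A minimal sketch of that pattern follows; the endpoint, credentials, and datastore paths are placeholders, not values from this log:

from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',  # hypothetical endpoint/credentials
    api_retry_count=3, task_poll_interval=0.5)

# invoke_api() sends the SOAP request; *_Task methods return a Task
# managed-object reference without waiting for the work to finish.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore1] vmware_temp/src.vmdk',  # illustrative paths
    destName='[datastore1] vmware_temp/dst.vmdk')

# wait_for_task() loops _poll_task() until the task reaches a terminal
# state; on error it raises the translated fault, which is how the
# VimFaultException('InvalidArgument') traceback above was produced.
task_info = session.wait_for_task(task)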
[ 918.988855] env[62476]: DEBUG nova.compute.claims [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 918.989189] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.989556] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.405103] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948c7026-39de-4346-88b0-b6bffa4583ba {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.415267] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbc611c-374e-408d-8d95-db547df9d25d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.450242] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e035813e-43a5-4924-b993-9e670adc91d6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.459788] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c1dfda-0d8b-4be1-ae56-274f48c2d39d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.476285] env[62476]: DEBUG nova.compute.provider_tree [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.487049] env[62476]: DEBUG nova.scheduler.client.report [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.506813] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 
tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.517s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 919.507605] env[62476]: ERROR nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 919.507605] env[62476]: Faults: ['InvalidArgument']
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Traceback (most recent call last):
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self.driver.spawn(context, instance, image_meta,
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self._fetch_image_if_missing(context, vi)
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] image_cache(vi, tmp_image_ds_loc)
[ 919.507605] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] vm_util.copy_virtual_disk(
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] session._wait_for_task(vmdk_copy_task)
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] return self.wait_for_task(task_ref)
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] return evt.wait()
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] result = hub.switch()
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] return self.greenlet.switch()
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 919.508027] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] self.f(*self.args, **self.kw)
[ 919.508418] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 919.508418] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] raise exceptions.translate_fault(task_info.error)
[ 919.508418] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 919.508418] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Faults: ['InvalidArgument']
[ 919.508418] env[62476]: ERROR nova.compute.manager [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1]
[ 919.508652] env[62476]: DEBUG nova.compute.utils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 919.510694] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Build of instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 was re-scheduled: A specified parameter was not correct: fileType
[ 919.510694] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 919.511255] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 919.511520] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 919.511766] env[62476]: DEBUG nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 919.511943] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 919.993831] env[62476]: DEBUG nova.network.neutron [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.008137] env[62476]: INFO nova.compute.manager [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Took 0.50 seconds to deallocate network for instance. [ 920.123519] env[62476]: INFO nova.scheduler.client.report [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleted allocations for instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 [ 920.151672] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0f7f80a2-720e-4536-90de-8deaa3d3e5fe tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 295.211s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.153547] env[62476]: DEBUG oslo_concurrency.lockutils [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 93.173s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.153926] env[62476]: DEBUG oslo_concurrency.lockutils [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.154167] env[62476]: DEBUG oslo_concurrency.lockutils [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.154342] env[62476]: DEBUG oslo_concurrency.lockutils [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.156467] env[62476]: INFO nova.compute.manager [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Terminating instance [ 920.158397] env[62476]: DEBUG nova.compute.manager [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 920.158592] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 920.159108] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6cd0b396-1fb1-46a2-b9ff-ceae10adbfaf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.170333] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afec849-29ec-4825-aa36-ce8132e4da61 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.182086] env[62476]: DEBUG nova.compute.manager [None req-642fde63-9cbf-42b1-b949-64d527549465 tempest-AttachInterfacesUnderV243Test-1728856832 tempest-AttachInterfacesUnderV243Test-1728856832-project-member] [instance: 327a282e-b502-4644-a152-0e77ec399fe7] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 920.207015] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6c3f0540-a722-4a13-9982-f40c2d6ce9b1 could not be found. [ 920.207015] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 920.207015] env[62476]: INFO nova.compute.manager [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Took 0.05 seconds to destroy the instance on the hypervisor. 
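The paired 'Acquiring lock ... by ...' / 'Lock ... acquired ... :: waited' / 'Lock ... "released" ... :: held' DEBUG entries that dominate this log are emitted by oslo.concurrency's lockutils wrapper around each synchronized section (per-instance build locks, "compute_resources", the image-cache locks, and so on). A minimal sketch of how such a section is declared; the lock names below are illustrative, not Nova's actual keys:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_claim():
    # Runs only while the named in-process lock is held; lockutils logs
    # the acquire/release pair with the waited/held durations seen above.
    pass

# The same primitive used directly, e.g. one lock per instance UUID:
with lockutils.lock('instance-uuid-events'):
    pass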
[ 920.207189] env[62476]: DEBUG oslo.service.loopingcall [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.207448] env[62476]: DEBUG nova.compute.manager [-] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 920.207496] env[62476]: DEBUG nova.network.neutron [-] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 920.219024] env[62476]: DEBUG nova.compute.manager [None req-642fde63-9cbf-42b1-b949-64d527549465 tempest-AttachInterfacesUnderV243Test-1728856832 tempest-AttachInterfacesUnderV243Test-1728856832-project-member] [instance: 327a282e-b502-4644-a152-0e77ec399fe7] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 920.242099] env[62476]: DEBUG oslo_concurrency.lockutils [None req-642fde63-9cbf-42b1-b949-64d527549465 tempest-AttachInterfacesUnderV243Test-1728856832 tempest-AttachInterfacesUnderV243Test-1728856832-project-member] Lock "327a282e-b502-4644-a152-0e77ec399fe7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.413s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.245903] env[62476]: DEBUG nova.network.neutron [-] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.253081] env[62476]: DEBUG nova.compute.manager [None req-5a45573f-1ac1-46cf-8348-0ccec7ad0efa tempest-ServerRescueTestJSON-824594041 tempest-ServerRescueTestJSON-824594041-project-member] [instance: 4770186f-8bd4-455c-a21d-f79e2230fa4e] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 920.256746] env[62476]: INFO nova.compute.manager [-] [instance: 6c3f0540-a722-4a13-9982-f40c2d6ce9b1] Took 0.05 seconds to deallocate network for instance. [ 920.276126] env[62476]: DEBUG nova.compute.manager [None req-5a45573f-1ac1-46cf-8348-0ccec7ad0efa tempest-ServerRescueTestJSON-824594041 tempest-ServerRescueTestJSON-824594041-project-member] [instance: 4770186f-8bd4-455c-a21d-f79e2230fa4e] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 920.297662] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5a45573f-1ac1-46cf-8348-0ccec7ad0efa tempest-ServerRescueTestJSON-824594041 tempest-ServerRescueTestJSON-824594041-project-member] Lock "4770186f-8bd4-455c-a21d-f79e2230fa4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.158s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.315910] env[62476]: DEBUG nova.compute.manager [None req-2c2db410-b875-4528-a636-0e8704c6a1bf tempest-ImagesOneServerNegativeTestJSON-1106775302 tempest-ImagesOneServerNegativeTestJSON-1106775302-project-member] [instance: c60488a7-7d3b-49af-8b4d-9aad718a37a4] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 920.339600] env[62476]: DEBUG nova.compute.manager [None req-2c2db410-b875-4528-a636-0e8704c6a1bf tempest-ImagesOneServerNegativeTestJSON-1106775302 tempest-ImagesOneServerNegativeTestJSON-1106775302-project-member] [instance: c60488a7-7d3b-49af-8b4d-9aad718a37a4] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 920.365745] env[62476]: DEBUG oslo_concurrency.lockutils [None req-33cf0e16-171c-4f94-9565-eaed058f1093 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "6c3f0540-a722-4a13-9982-f40c2d6ce9b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.212s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.371541] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2c2db410-b875-4528-a636-0e8704c6a1bf tempest-ImagesOneServerNegativeTestJSON-1106775302 tempest-ImagesOneServerNegativeTestJSON-1106775302-project-member] Lock "c60488a7-7d3b-49af-8b4d-9aad718a37a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.026s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.381373] env[62476]: DEBUG nova.compute.manager [None req-0a4fd872-d827-468d-ae65-eb6f82c93d5b tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] [instance: 02f37f91-5ee0-46bb-a5e2-ec8256c1f22c] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 920.407235] env[62476]: DEBUG nova.compute.manager [None req-0a4fd872-d827-468d-ae65-eb6f82c93d5b tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] [instance: 02f37f91-5ee0-46bb-a5e2-ec8256c1f22c] Instance disappeared before build. 
[ 920.430329] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0a4fd872-d827-468d-ae65-eb6f82c93d5b tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] Lock "02f37f91-5ee0-46bb-a5e2-ec8256c1f22c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.929s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 920.440638] env[62476]: DEBUG nova.compute.manager [None req-cd51b49c-ea51-45e1-864e-6014e6a9e45c tempest-ServersAdminNegativeTestJSON-35601590 tempest-ServersAdminNegativeTestJSON-35601590-project-member] [instance: 9ea6880c-469b-4c66-927e-442a41e22163] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 920.466821] env[62476]: DEBUG nova.compute.manager [None req-cd51b49c-ea51-45e1-864e-6014e6a9e45c tempest-ServersAdminNegativeTestJSON-35601590 tempest-ServersAdminNegativeTestJSON-35601590-project-member] [instance: 9ea6880c-469b-4c66-927e-442a41e22163] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 920.490333] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cd51b49c-ea51-45e1-864e-6014e6a9e45c tempest-ServersAdminNegativeTestJSON-35601590 tempest-ServersAdminNegativeTestJSON-35601590-project-member] Lock "9ea6880c-469b-4c66-927e-442a41e22163" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.968s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 920.500482] env[62476]: DEBUG nova.compute.manager [None req-c0b4a918-7c0c-4a85-8d45-3f47506313ad tempest-ServerDiagnosticsV248Test-1209440432 tempest-ServerDiagnosticsV248Test-1209440432-project-member] [instance: 7c661fb6-abb7-486b-9188-f8d4dd6bb1a0] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 920.528028] env[62476]: DEBUG nova.compute.manager [None req-c0b4a918-7c0c-4a85-8d45-3f47506313ad tempest-ServerDiagnosticsV248Test-1209440432 tempest-ServerDiagnosticsV248Test-1209440432-project-member] [instance: 7c661fb6-abb7-486b-9188-f8d4dd6bb1a0] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 920.551024] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c0b4a918-7c0c-4a85-8d45-3f47506313ad tempest-ServerDiagnosticsV248Test-1209440432 tempest-ServerDiagnosticsV248Test-1209440432-project-member] Lock "7c661fb6-abb7-486b-9188-f8d4dd6bb1a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.216s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 920.561295] env[62476]: DEBUG nova.compute.manager [None req-387d0014-0337-49ad-845e-0d4112a73d6f tempest-ServerDiagnosticsNegativeTest-163132470 tempest-ServerDiagnosticsNegativeTest-163132470-project-member] [instance: 7620aead-4244-47ac-be0a-6614d03ec2c6] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 920.587603] env[62476]: DEBUG nova.compute.manager [None req-387d0014-0337-49ad-845e-0d4112a73d6f tempest-ServerDiagnosticsNegativeTest-163132470 tempest-ServerDiagnosticsNegativeTest-163132470-project-member] [instance: 7620aead-4244-47ac-be0a-6614d03ec2c6] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 920.611088] env[62476]: DEBUG oslo_concurrency.lockutils [None req-387d0014-0337-49ad-845e-0d4112a73d6f tempest-ServerDiagnosticsNegativeTest-163132470 tempest-ServerDiagnosticsNegativeTest-163132470-project-member] Lock "7620aead-4244-47ac-be0a-6614d03ec2c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.595s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 920.622190] env[62476]: DEBUG nova.compute.manager [None req-5b1017b3-8272-45fc-8ecd-57c0aa6a49d1 tempest-InstanceActionsV221TestJSON-386317821 tempest-InstanceActionsV221TestJSON-386317821-project-member] [instance: 8ce40d44-062c-47cf-be36-d8ed6d924094] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 920.647763] env[62476]: DEBUG nova.compute.manager [None req-5b1017b3-8272-45fc-8ecd-57c0aa6a49d1 tempest-InstanceActionsV221TestJSON-386317821 tempest-InstanceActionsV221TestJSON-386317821-project-member] [instance: 8ce40d44-062c-47cf-be36-d8ed6d924094] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 920.671583] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5b1017b3-8272-45fc-8ecd-57c0aa6a49d1 tempest-InstanceActionsV221TestJSON-386317821 tempest-InstanceActionsV221TestJSON-386317821-project-member] Lock "8ce40d44-062c-47cf-be36-d8ed6d924094" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.254s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 920.683288] env[62476]: DEBUG nova.compute.manager [None req-9ef8f4ca-9438-414b-b2f8-1f94a82c819e tempest-ServersTestBootFromVolume-495427889 tempest-ServersTestBootFromVolume-495427889-project-member] [instance: a2008090-914b-448c-8c60-776d4032e091] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 920.712382] env[62476]: DEBUG nova.compute.manager [None req-9ef8f4ca-9438-414b-b2f8-1f94a82c819e tempest-ServersTestBootFromVolume-495427889 tempest-ServersTestBootFromVolume-495427889-project-member] [instance: a2008090-914b-448c-8c60-776d4032e091] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 920.737402] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9ef8f4ca-9438-414b-b2f8-1f94a82c819e tempest-ServersTestBootFromVolume-495427889 tempest-ServersTestBootFromVolume-495427889-project-member] Lock "a2008090-914b-448c-8c60-776d4032e091" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.491s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 920.748025] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 920.806732] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 920.806732] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 920.807144] env[62476]: INFO nova.compute.claims [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 921.221756] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0907226-946d-4c9d-bc81-8e336196bc2c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.230196] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2971e060-df4f-456c-b5e8-8208b13523b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.262717] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b299314-94b3-4b9f-8ab0-87066335851d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.271090] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2224a227-16df-47ef-b186-23718b1c16b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.286613] env[62476]: DEBUG nova.compute.provider_tree [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 921.296663] env[62476]: DEBUG nova.scheduler.client.report [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 921.313410] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.508s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 921.313956] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 921.353446] env[62476]: DEBUG nova.compute.utils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 921.354819] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 921.354985] env[62476]: DEBUG nova.network.neutron [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 921.368023] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 921.441710] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 921.446993] env[62476]: DEBUG nova.policy [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1049e5f09cf0462fa40943bc3f5cc739', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7638c00f848b483283237ea78e8d03fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}}
[ 921.472224] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 921.472499] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 921.472736] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 921.472931] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 921.473332] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 921.473570] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 921.473886] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 921.474133] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 921.474375] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 921.474602] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 921.474945] env[62476]: DEBUG nova.virt.hardware [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 921.475923] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e48c56-df1b-43ce-a8ef-be8107af02fe {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.485491] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973b67c1-82a8-42c5-8903-5ec5f00daa3f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.813189] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6e86e42d-d2c3-452f-a762-ec5afd76acaa tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "e1c8d6f6-b179-4e47-ac13-9abeb84e5a53" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 921.813424] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6e86e42d-d2c3-452f-a762-ec5afd76acaa tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "e1c8d6f6-b179-4e47-ac13-9abeb84e5a53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 921.995905] env[62476]: DEBUG nova.network.neutron [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Successfully created port: 0946bc11-3345-4324-aae9-826311e73130 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 922.480799] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "1e005b4d-7f94-4263-ba5d-303af209c408" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 923.206658] env[62476]: DEBUG nova.network.neutron [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Successfully updated port: 0946bc11-3345-4324-aae9-826311e73130 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 923.220927] env[62476]: DEBUG nova.compute.manager [req-6da6346f-2cfe-4e6b-9c1f-e1ab29177a43 req-51b7119f-71b7-4935-b1f8-6f8033ae5887 service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Received event network-vif-plugged-0946bc11-3345-4324-aae9-826311e73130 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 923.221726] env[62476]: DEBUG oslo_concurrency.lockutils [req-6da6346f-2cfe-4e6b-9c1f-e1ab29177a43 req-51b7119f-71b7-4935-b1f8-6f8033ae5887 service nova] Acquiring lock "1e005b4d-7f94-4263-ba5d-303af209c408-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 923.221995] env[62476]: DEBUG oslo_concurrency.lockutils [req-6da6346f-2cfe-4e6b-9c1f-e1ab29177a43 req-51b7119f-71b7-4935-b1f8-6f8033ae5887 service nova] Lock "1e005b4d-7f94-4263-ba5d-303af209c408-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 923.222239] env[62476]: DEBUG oslo_concurrency.lockutils [req-6da6346f-2cfe-4e6b-9c1f-e1ab29177a43 req-51b7119f-71b7-4935-b1f8-6f8033ae5887 service nova] Lock "1e005b4d-7f94-4263-ba5d-303af209c408-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 923.222445] env[62476]: DEBUG nova.compute.manager [req-6da6346f-2cfe-4e6b-9c1f-e1ab29177a43 req-51b7119f-71b7-4935-b1f8-6f8033ae5887 service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] No waiting events found dispatching network-vif-plugged-0946bc11-3345-4324-aae9-826311e73130 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 923.222713] env[62476]: WARNING nova.compute.manager [req-6da6346f-2cfe-4e6b-9c1f-e1ab29177a43 req-51b7119f-71b7-4935-b1f8-6f8033ae5887 service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Received unexpected event network-vif-plugged-0946bc11-3345-4324-aae9-826311e73130 for instance with vm_state building and task_state deleting.
[ 923.228872] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 923.229027] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 923.229178] env[62476]: DEBUG nova.network.neutron [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 923.304322] env[62476]: DEBUG nova.network.neutron [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 923.588455] env[62476]: DEBUG nova.network.neutron [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Updating instance_info_cache with network_info: [{"id": "0946bc11-3345-4324-aae9-826311e73130", "address": "fa:16:3e:a6:c5:0f", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0946bc11-33", "ovs_interfaceid": "0946bc11-3345-4324-aae9-826311e73130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 923.605141] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Releasing lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 923.605462] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Instance network_info: |[{"id": "0946bc11-3345-4324-aae9-826311e73130", "address": "fa:16:3e:a6:c5:0f", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0946bc11-33", "ovs_interfaceid": "0946bc11-3345-4324-aae9-826311e73130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 923.605882] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:c5:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0946bc11-3345-4324-aae9-826311e73130', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 923.613462] env[62476]: DEBUG oslo.service.loopingcall [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 923.614054] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 923.614674] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f3a7430-5642-4da8-8495-c3eaeb36d2ae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 923.638441] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 923.638441] env[62476]: value = "task-4319070"
[ 923.638441] env[62476]: _type = "Task"
[ 923.638441] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 923.647494] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319070, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 924.150200] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319070, 'name': CreateVM_Task, 'duration_secs': 0.343958} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 924.150460] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 924.151145] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 924.151316] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 924.151658] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 924.151925] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-848a63ea-c9fc-4866-b2cc-5219a6db1e0a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.157422] env[62476]: DEBUG oslo_vmware.api [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for the task: (returnval){
[ 924.157422] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]521ad046-d110-5440-91c3-7378f98c6592"
[ 924.157422] env[62476]: _type = "Task"
[ 924.157422] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 924.167437] env[62476]: DEBUG oslo_vmware.api [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]521ad046-d110-5440-91c3-7378f98c6592, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 924.668123] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 924.668405] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 924.668612] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 925.299349] env[62476]: DEBUG nova.compute.manager [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Received event network-changed-0946bc11-3345-4324-aae9-826311e73130 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 925.299480] env[62476]: DEBUG nova.compute.manager [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Refreshing instance network info cache due to event network-changed-0946bc11-3345-4324-aae9-826311e73130. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 925.299693] env[62476]: DEBUG oslo_concurrency.lockutils [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] Acquiring lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 925.299859] env[62476]: DEBUG oslo_concurrency.lockutils [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] Acquired lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 925.300031] env[62476]: DEBUG nova.network.neutron [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Refreshing network info cache for port 0946bc11-3345-4324-aae9-826311e73130 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 925.613735] env[62476]: DEBUG nova.network.neutron [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Updated VIF entry in instance network info cache for port 0946bc11-3345-4324-aae9-826311e73130. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 925.614139] env[62476]: DEBUG nova.network.neutron [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Updating instance_info_cache with network_info: [{"id": "0946bc11-3345-4324-aae9-826311e73130", "address": "fa:16:3e:a6:c5:0f", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0946bc11-33", "ovs_interfaceid": "0946bc11-3345-4324-aae9-826311e73130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 925.624742] env[62476]: DEBUG oslo_concurrency.lockutils [req-e58d413e-d150-43f8-a08c-ece3c83bcf69 req-de561b29-c230-49e8-860f-f6d4647bc25f service nova] Releasing lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 928.028366] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 929.023185] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 929.026907] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 929.027070] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
[ 930.027401] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 930.027777] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}}
[ 930.027777] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 930.050798] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.050986] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.051148] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.051292] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.051420] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.051544] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.051670] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.051821] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.051946] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.052076] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 930.052202] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
[ 930.052719] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 930.052923] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 930.053096] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 930.064237] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 930.064453] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 930.064619] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 930.064773] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 930.065868] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6adee9-9589-47ec-bac4-0a0ffc9da96a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 930.074966] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966202f4-606e-4ee3-935a-994b1ff56bc3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 930.089737] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05ca0a3-5aa1-4d72-98f3-0ec451153634 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 930.097143] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f278327f-cbf2-4626-985b-83f2d6df62b3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 930.126387] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180709MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 930.127022] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 930.127022] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 930.219245] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 760f3c9b-044d-4593-bc97-535ac09c3f3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.219453] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1323e67f-17c6-4432-8eea-98c285745766 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.219589] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.219849] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e41d1a8c-ad7e-4151-9745-04318b007dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.219849] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.219957] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.220145] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.220272] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.220389] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.220507] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 930.241568] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.254337] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4a9416ca-21ad-42eb-9ffd-a0009d6d96a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.266131] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.277724] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5169fc76-eb51-45f0-9f19-737fb3213125 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.291107] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 31afc3bf-67c8-481a-9413-e69b5d6bf74f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.301515] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 92e0c9fd-582c-4118-b7e0-0fb822b1c38e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.313278] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.324632] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3295b0cb-15d5-4008-bc76-95b69f2f40a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.337296] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7aab03db-43b4-4884-bc20-0a29058ea2ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.348846] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ade4daab-4b02-4664-b745-a0c799d8415d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.361675] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cd2482db-1c9e-4b1a-bb79-b7250cb863a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.372401] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2c553a0b-1f8b-42aa-8b64-e22bc3cac45e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.384865] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 02b24610-323b-47b0-9c3e-f397cb48835e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.394616] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4439f302-8fa4-452a-97d8-4d6c1fef36d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.405813] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1241b06a-696d-4f96-961e-95129b1ba674 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 930.416909] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4be38fcc-5fa9-43b3-ab33-544812082b2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 930.428455] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e1c8d6f6-b179-4e47-ac13-9abeb84e5a53 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 930.428712] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 930.428874] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 930.791996] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856914ea-4cb1-40fa-b296-d9112b1c4e6d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.800438] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef22bd7-8706-44c6-bea5-4f145c082460 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.830156] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95b36c4-2b5f-400e-9c26-85fba77d1816 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.840051] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e753c02b-0db7-411a-9684-4c0804c5aa5e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.853019] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.865026] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.882462] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 930.882848] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.756s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.857139] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.026879] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.022957] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 967.829691] env[62476]: WARNING oslo_vmware.rw_handles [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 967.829691] env[62476]: ERROR oslo_vmware.rw_handles [ 967.830716] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 967.831959] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 
tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 967.832247] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Copying Virtual Disk [datastore1] vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/91256662-edcd-49a2-baee-3e5c028d41d9/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 967.832555] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1110a790-fa24-4bf6-a181-cadd4907472c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.842249] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Waiting for the task: (returnval){ [ 967.842249] env[62476]: value = "task-4319071" [ 967.842249] env[62476]: _type = "Task" [ 967.842249] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.851021] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Task: {'id': task-4319071, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.353561] env[62476]: DEBUG oslo_vmware.exceptions [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 968.353561] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.354363] env[62476]: ERROR nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 968.354363] env[62476]: Faults: ['InvalidArgument'] [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Traceback (most recent call last): [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] yield resources [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self.driver.spawn(context, instance, image_meta, [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self._fetch_image_if_missing(context, vi) [ 968.354363] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] image_cache(vi, tmp_image_ds_loc) [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] vm_util.copy_virtual_disk( [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] session._wait_for_task(vmdk_copy_task) [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] return self.wait_for_task(task_ref) [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] return evt.wait() [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] result = hub.switch() [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 968.354750] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] return self.greenlet.switch() [ 968.355182] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 968.355182] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self.f(*self.args, **self.kw) [ 968.355182] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 968.355182] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] raise exceptions.translate_fault(task_info.error) [ 968.355182] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 968.355182] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Faults: ['InvalidArgument'] [ 968.355182] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] [ 968.355182] env[62476]: INFO nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Terminating instance [ 968.356085] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.356294] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.356552] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-828dda76-9f5a-40e5-8db4-528a232a5734 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.359113] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 968.359465] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 968.360134] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994bfee1-1557-4de8-bb97-d3c5ac8dec0f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.367662] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 968.367906] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fee5d477-2c95-4bbe-9927-41247ba15bbb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.370504] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.370687] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 968.371747] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83ce4876-af13-4740-b2e4-dbac8ca18e0d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.378231] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Waiting for the task: (returnval){ [ 968.378231] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52e83777-6af1-1b3d-5407-52cb7a5cf564" [ 968.378231] env[62476]: _type = "Task" [ 968.378231] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.394034] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 968.394334] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Creating directory with path [datastore1] vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.394457] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-856315e7-1591-4ff3-9129-4279c0ce3453 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.416656] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Created directory with path [datastore1] vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.416886] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Fetch image to [datastore1] vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 968.417076] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 968.417911] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6226f0b1-1a79-4324-88c1-1e0e56fb23b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.425917] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd81e65-b04a-42f9-a9ad-93107ea27da1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.436248] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253d4e02-e8fb-4f5c-b8e1-9d888d4948a1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.442269] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 
tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 968.442469] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 968.443310] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Deleting the datastore file [datastore1] 760f3c9b-044d-4593-bc97-535ac09c3f3b {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.443310] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf80f0c7-1720-4833-9b70-e78b243736b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.471914] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8359533a-e5c4-429d-a268-8d39df6c5c18 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.475831] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Waiting for the task: (returnval){ [ 968.475831] env[62476]: value = "task-4319073" [ 968.475831] env[62476]: _type = "Task" [ 968.475831] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.481653] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-01982fe6-6647-4cb3-bc5d-b49b2a6ee436 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.486206] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Task: {'id': task-4319073, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.506224] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 968.563294] env[62476]: DEBUG oslo_vmware.rw_handles [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 968.624366] env[62476]: DEBUG oslo_vmware.rw_handles [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 968.624589] env[62476]: DEBUG oslo_vmware.rw_handles [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 968.986709] env[62476]: DEBUG oslo_vmware.api [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Task: {'id': task-4319073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073016} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.987067] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.987160] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 968.987330] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 968.987507] env[62476]: INFO nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Took 0.63 seconds to destroy the instance on the hypervisor. 
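The entries above trace oslo.vmware's invoke-then-poll task protocol end to end: a *_Task method is invoked (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), wait_for_task() polls the task's info (the "progress is 0%." and "duration_secs" lines), and a failed task surfaces as a translated fault such as the VimFaultException "A specified parameter was not correct: fileType" seen earlier. A minimal sketch of that pattern follows; it assumes an already-configured oslo.vmware VMwareAPISession named session, and datacenter_ref/path are illustrative placeholders rather than values from this log.

    # Minimal sketch of the invoke-then-wait pattern visible in the log.
    # Assumptions: `session` is an oslo.vmware VMwareAPISession;
    # `datacenter_ref` and `path` are illustrative placeholders.
    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, datacenter_ref, path):
        file_manager = session.vim.service_content.fileManager
        # Starts DeleteDatastoreFile_Task on vCenter and returns a task ref,
        # which is what the 'Waiting for the task: ... "task-NNN"' lines track.
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=path,
                                  datacenter=datacenter_ref)
        try:
            # Polls task.info (the "progress is 0%" lines) and raises a
            # translated exception, e.g. VimFaultException, if the task errors.
            session.wait_for_task(task)
        except vexc.FileNotFoundException:
            # Deleting an already-missing file is treated as success.
            pass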
[ 968.989649] env[62476]: DEBUG nova.compute.claims [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 968.989823] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.990047] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.368229] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cc6d08-89fb-4a33-ac34-12c1873653cd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.376409] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bc8b13-b1cd-4f8a-a1d7-4242b01538db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.407493] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc7f10b-e583-41ae-be4c-938331b29de7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.416111] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee600b5-a2ae-4559-b004-8607737bf317 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.429517] env[62476]: DEBUG nova.compute.provider_tree [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.439179] env[62476]: DEBUG nova.scheduler.client.report [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 969.454493] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b 
tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.464s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.455050] env[62476]: ERROR nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 969.455050] env[62476]: Faults: ['InvalidArgument'] [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Traceback (most recent call last): [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self.driver.spawn(context, instance, image_meta, [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self._fetch_image_if_missing(context, vi) [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] image_cache(vi, tmp_image_ds_loc) [ 969.455050] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] vm_util.copy_virtual_disk( [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] session._wait_for_task(vmdk_copy_task) [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] return self.wait_for_task(task_ref) [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] return evt.wait() [ 969.455442] env[62476]: 
ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] result = hub.switch() [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] return self.greenlet.switch() [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 969.455442] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] self.f(*self.args, **self.kw) [ 969.455786] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 969.455786] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] raise exceptions.translate_fault(task_info.error) [ 969.455786] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 969.455786] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Faults: ['InvalidArgument'] [ 969.455786] env[62476]: ERROR nova.compute.manager [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] [ 969.455929] env[62476]: DEBUG nova.compute.utils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 969.457389] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Build of instance 760f3c9b-044d-4593-bc97-535ac09c3f3b was re-scheduled: A specified parameter was not correct: fileType [ 969.457389] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 969.457761] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 969.457933] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 969.458125] env[62476]: DEBUG nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 969.458309] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 969.827557] env[62476]: DEBUG nova.network.neutron [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.840030] env[62476]: INFO nova.compute.manager [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Took 0.38 seconds to deallocate network for instance. [ 969.986039] env[62476]: INFO nova.scheduler.client.report [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Deleted allocations for instance 760f3c9b-044d-4593-bc97-535ac09c3f3b [ 970.010337] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a231e5f5-959e-40d8-aff7-a0eab09ec64b tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 333.668s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.011471] env[62476]: DEBUG oslo_concurrency.lockutils [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 133.446s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.011698] env[62476]: DEBUG oslo_concurrency.lockutils [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Acquiring lock "760f3c9b-044d-4593-bc97-535ac09c3f3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.011903] env[62476]: DEBUG oslo_concurrency.lockutils [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.012091] env[62476]: DEBUG oslo_concurrency.lockutils [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.014168] env[62476]: INFO nova.compute.manager [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Terminating instance [ 970.016531] env[62476]: DEBUG nova.compute.manager [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 970.016731] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 970.017260] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75ec337c-1d6a-4981-9aed-1a26c0f12c08 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.027191] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369f8100-5d8f-4713-91fd-d5fee096cd23 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.037584] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 970.059210] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 760f3c9b-044d-4593-bc97-535ac09c3f3b could not be found. 
[ 970.059432] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 970.059687] env[62476]: INFO nova.compute.manager [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 970.059946] env[62476]: DEBUG oslo.service.loopingcall [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.060230] env[62476]: DEBUG nova.compute.manager [-] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 970.060335] env[62476]: DEBUG nova.network.neutron [-] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 970.087026] env[62476]: DEBUG nova.network.neutron [-] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.093699] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.093954] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.095822] env[62476]: INFO nova.compute.claims [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 970.099037] env[62476]: INFO nova.compute.manager [-] [instance: 760f3c9b-044d-4593-bc97-535ac09c3f3b] Took 0.04 seconds to deallocate network for instance. 
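Two mechanisms sit behind the claim lines here. Capacity comes from the reported inventory as (total - reserved) * allocation_ratio, i.e. 48 * 4.0 = 192 schedulable VCPUs and (196590 - 512) * 1.0 MB of RAM for this node, and every claim or abort runs under the "compute_resources" lock whose acquire/wait/held timings recur throughout this log. A sketch of that lock discipline with oslo.concurrency; the class and method body are illustrative, not Nova's actual tracker.

    # Sketch of the lock discipline behind the "compute_resources" lines:
    # lockutils.synchronized serializes claims against the tracker state and
    # emits the Acquiring/acquired/released debug lines seen in this log.
    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

    class ResourceTracker:
        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, instance, node):
            # Test the request against free resources; on success the claim
            # is recorded, on failure the build gets rescheduled elsewhere.
            ...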
[ 970.227557] env[62476]: DEBUG oslo_concurrency.lockutils [None req-308ca898-4994-40db-a3de-187cc2a9004a tempest-AttachInterfacesV270Test-1253282387 tempest-AttachInterfacesV270Test-1253282387-project-member] Lock "760f3c9b-044d-4593-bc97-535ac09c3f3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.479048] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9213a8af-08e9-495b-b5a1-ee2620a6d09b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.487023] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e99a00-9cba-4b34-bfc6-dae4ecb801eb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.517453] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b96d5e7-9c40-4ce4-84fa-c927328a6cb7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.525233] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba065056-ef53-41a7-af10-93e2bb41261a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.538629] env[62476]: DEBUG nova.compute.provider_tree [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.546972] env[62476]: DEBUG nova.scheduler.client.report [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.560795] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.467s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.561343] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 970.598674] env[62476]: DEBUG nova.compute.utils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 970.599945] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 970.600273] env[62476]: DEBUG nova.network.neutron [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 970.612145] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 970.683298] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 970.695879] env[62476]: DEBUG nova.policy [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6cd3a757654146328dda1c922c3d8537', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5c72ad8ddb4466bb06b7bf4b8669248', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 970.713532] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.713758] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.713913] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.714117] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.714271] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.714417] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.714620] 
env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.714778] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.714940] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.715141] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.715327] env[62476]: DEBUG nova.virt.hardware [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.716228] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680b48f6-4e2a-40e2-b273-e67c31e7d41a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.726352] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7764614-570b-4b75-b4a3-c8f1e3bcd502 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.640264] env[62476]: DEBUG nova.network.neutron [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Successfully created port: f1ea5b02-0bba-4930-bdad-f6518c08d034 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 972.947592] env[62476]: DEBUG nova.compute.manager [req-7c1f9312-c8d8-4e21-a4c1-f365296b452f req-f857bcef-0880-4f50-b684-60ba912d8ce0 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Received event network-vif-plugged-f1ea5b02-0bba-4930-bdad-f6518c08d034 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 972.947824] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c1f9312-c8d8-4e21-a4c1-f365296b452f req-f857bcef-0880-4f50-b684-60ba912d8ce0 service nova] Acquiring lock "f4e97733-101b-46dd-aec4-a3287b120eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.948133] env[62476]: DEBUG oslo_concurrency.lockutils 
[req-7c1f9312-c8d8-4e21-a4c1-f365296b452f req-f857bcef-0880-4f50-b684-60ba912d8ce0 service nova] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.948313] env[62476]: DEBUG oslo_concurrency.lockutils [req-7c1f9312-c8d8-4e21-a4c1-f365296b452f req-f857bcef-0880-4f50-b684-60ba912d8ce0 service nova] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.948473] env[62476]: DEBUG nova.compute.manager [req-7c1f9312-c8d8-4e21-a4c1-f365296b452f req-f857bcef-0880-4f50-b684-60ba912d8ce0 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] No waiting events found dispatching network-vif-plugged-f1ea5b02-0bba-4930-bdad-f6518c08d034 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 972.948627] env[62476]: WARNING nova.compute.manager [req-7c1f9312-c8d8-4e21-a4c1-f365296b452f req-f857bcef-0880-4f50-b684-60ba912d8ce0 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Received unexpected event network-vif-plugged-f1ea5b02-0bba-4930-bdad-f6518c08d034 for instance with vm_state building and task_state spawning. [ 973.035767] env[62476]: DEBUG nova.network.neutron [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Successfully updated port: f1ea5b02-0bba-4930-bdad-f6518c08d034 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 973.050982] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "refresh_cache-f4e97733-101b-46dd-aec4-a3287b120eb0" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.050982] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquired lock "refresh_cache-f4e97733-101b-46dd-aec4-a3287b120eb0" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.050982] env[62476]: DEBUG nova.network.neutron [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 973.128106] env[62476]: DEBUG nova.network.neutron [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Instance cache missing network info. 
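The entries above illustrate Nova's external-event handshake: Neutron reports network-vif-plugged, the compute manager pops any waiter registered for that (instance, event) pair under the per-instance "-events" lock, and, finding none because the instance is still building, logs the event as unexpected. A simplified sketch of that pattern (class and field names are illustrative, not Nova's real ones):

```python
import threading

class InstanceEvents:
    """Toy version of the pop_instance_event pattern in the log."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance, event) -> threading.Event

    def prepare(self, instance, event):
        with self._lock:
            waiter = threading.Event()
            self._waiters[(instance, event)] = waiter
            return waiter

    def pop(self, instance, event):
        with self._lock:
            return self._waiters.pop((instance, event), None)

events = InstanceEvents()
waiter = events.pop("f4e97733", "network-vif-plugged")
if waiter is None:
    # corresponds to the WARNING "Received unexpected event ..." above
    print("No waiting events found; event is unexpected")
else:
    waiter.set()                        # would wake the spawning thread
```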
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 973.575317] env[62476]: DEBUG nova.network.neutron [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Updating instance_info_cache with network_info: [{"id": "f1ea5b02-0bba-4930-bdad-f6518c08d034", "address": "fa:16:3e:8c:76:7b", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ea5b02-0b", "ovs_interfaceid": "f1ea5b02-0bba-4930-bdad-f6518c08d034", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.588896] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Releasing lock "refresh_cache-f4e97733-101b-46dd-aec4-a3287b120eb0" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.589488] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Instance network_info: |[{"id": "f1ea5b02-0bba-4930-bdad-f6518c08d034", "address": "fa:16:3e:8c:76:7b", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ea5b02-0b", "ovs_interfaceid": "f1ea5b02-0bba-4930-bdad-f6518c08d034", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 973.591781] env[62476]: DEBUG 
nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:76:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1ea5b02-0bba-4930-bdad-f6518c08d034', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.603414] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Creating folder: Project (b5c72ad8ddb4466bb06b7bf4b8669248). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 973.605351] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c977d634-b505-4af2-b7bc-9ebb2d1ca436 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.619163] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Created folder: Project (b5c72ad8ddb4466bb06b7bf4b8669248) in parent group-v849485. [ 973.619368] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Creating folder: Instances. Parent ref: group-v849534. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 973.619627] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adbf26bb-b078-42f2-9b39-eb53a7b78085 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.629724] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Created folder: Instances in parent group-v849534. [ 973.629981] env[62476]: DEBUG oslo.service.loopingcall [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.630228] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 973.630408] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-064e33ed-f384-4857-a475-28e56c990c69 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.657661] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.657661] env[62476]: value = "task-4319076" [ 973.657661] env[62476]: _type = "Task" [ 973.657661] env[62476]: } to complete. 
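The "Instance VIF info" entry above is a direct projection of the network_info cache shown earlier: bridge name, MAC, and the NSX logical-switch id become the VMware network reference. A sketch of that mapping (the helper is hypothetical; the field names are taken verbatim from the two log entries):

```python
vif = {  # trimmed copy of the network_info entry logged above
    "id": "f1ea5b02-0bba-4930-bdad-f6518c08d034",
    "address": "fa:16:3e:8c:76:7b",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1"},
}

def to_vif_info(vif, vif_model="vmxnet3"):
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

print(to_vif_info(vif))
```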
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.668685] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319076, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.167952] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319076, 'name': CreateVM_Task, 'duration_secs': 0.319712} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.168321] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 974.168903] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.169172] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.169462] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.169895] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8c23a25-0e61-4b5c-a3b1-2a94a5e6f20a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.176069] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Waiting for the task: (returnval){ [ 974.176069] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]529381d8-82c8-46dd-4a84-30b635302e53" [ 974.176069] env[62476]: _type = "Task" [ 974.176069] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.185681] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]529381d8-82c8-46dd-4a84-30b635302e53, 'name': SearchDatastore_Task} progress is 0%. 
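The "Waiting for the task" / "progress is 0%." / "completed successfully" sequence here is the standard oslo.vmware polling loop: query task info on an interval until it reaches a terminal state. A hedged sketch of that loop, with fetch_task_info as a stand-in for the real vSphere property query:

```python
import time

def wait_for_task(fetch_task_info, interval=0.5):
    """Poll a task-info dict until success or error, as the log lines suggest."""
    while True:
        info = fetch_task_info()
        if info["state"] == "success":
            return info.get("result")       # e.g. the created VM reference
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print("progress is %s%%." % info.get("progress", 0))
        time.sleep(interval)

# toy task that finishes on the second poll
states = iter([{"state": "running", "progress": 0},
               {"state": "success", "result": "task-4319076"}])
print(wait_for_task(lambda: next(states), interval=0))
```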
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.685641] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.685975] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.686128] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.974691] env[62476]: DEBUG nova.compute.manager [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Received event network-changed-f1ea5b02-0bba-4930-bdad-f6518c08d034 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 974.974899] env[62476]: DEBUG nova.compute.manager [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Refreshing instance network info cache due to event network-changed-f1ea5b02-0bba-4930-bdad-f6518c08d034. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 974.975158] env[62476]: DEBUG oslo_concurrency.lockutils [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] Acquiring lock "refresh_cache-f4e97733-101b-46dd-aec4-a3287b120eb0" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.975311] env[62476]: DEBUG oslo_concurrency.lockutils [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] Acquired lock "refresh_cache-f4e97733-101b-46dd-aec4-a3287b120eb0" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.975475] env[62476]: DEBUG nova.network.neutron [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Refreshing network info cache for port f1ea5b02-0bba-4930-bdad-f6518c08d034 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 975.486035] env[62476]: DEBUG nova.network.neutron [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Updated VIF entry in instance network info cache for port f1ea5b02-0bba-4930-bdad-f6518c08d034. 
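The "Processing image" entry and the lock acquisitions on the cached .vmdk path above follow the usual cache-fill pattern: serialize on the cache entry, then fetch and convert only if it is still missing. A simplified sketch under that assumption, where fetch and convert stand in for the Glance download and CopyVirtualDisk steps seen elsewhere in this log:

```python
import os
import threading

_guard = threading.Lock()
_cache_locks = {}

def _lock_for(path):
    with _guard:
        return _cache_locks.setdefault(path, threading.Lock())

def fetch_image_if_missing(cached_path, fetch, convert):
    with _lock_for(cached_path):        # the "[datastore1] devstack-image-cache..." lock
        if os.path.exists(cached_path):
            return cached_path          # another request already cached it
        tmp = cached_path + ".tmp-sparse"
        fetch(tmp)                      # download the sparse image
        convert(tmp, cached_path)       # flatten it into the final cache entry
        os.remove(tmp)
        return cached_path
```

In the failing spawn at the end of this section, it is the convert step, the CopyVirtualDisk_Task, that raises.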
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 975.486427] env[62476]: DEBUG nova.network.neutron [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Updating instance_info_cache with network_info: [{"id": "f1ea5b02-0bba-4930-bdad-f6518c08d034", "address": "fa:16:3e:8c:76:7b", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.137", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ea5b02-0b", "ovs_interfaceid": "f1ea5b02-0bba-4930-bdad-f6518c08d034", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.497425] env[62476]: DEBUG oslo_concurrency.lockutils [req-70056efa-d172-4b14-80b3-089ed0fec335 req-ac56dd1c-a6a6-40e0-a342-f5db16c4bf79 service nova] Releasing lock "refresh_cache-f4e97733-101b-46dd-aec4-a3287b120eb0" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.592544] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "f4e97733-101b-46dd-aec4-a3287b120eb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.154374] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "3cdef023-ce78-4c3b-8476-5508c18204c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.154857] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.027170] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.027553] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 990.027553] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 990.050327] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.050555] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.050638] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.050732] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.050860] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.051273] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.051273] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.051273] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.051495] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. 
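Every instance in the heal pass above is still in the Building state, so the cache healer skips them all; the next entry confirms that no candidates remained. A sketch of that filter (field names illustrative):

```python
def instances_to_heal(instances):
    for inst in instances:
        if inst["vm_state"] == "building":   # as logged for all ten instances above
            print("[instance: %s] Skipping network cache update for instance "
                  "because it is Building." % inst["uuid"])
            continue
        yield inst                           # steady-state: refresh its network info

healable = list(instances_to_heal(
    [{"uuid": "f4e97733-101b-46dd-aec4-a3287b120eb0", "vm_state": "building"}]))
print(healable)   # -> [] ("Didn't find any instances for network info cache update.")
```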
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.051495] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 990.051612] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 990.052038] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.052302] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.052396] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 991.027374] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.027680] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.039311] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.039553] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.039728] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.039887] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 991.041291] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c603daf5-0f9a-495e-8fe2-d29d61c260f1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.051518] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d45a356-01be-405c-a97d-d48f581a5d00 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.067080] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ae57ad-f747-4364-8944-8fa50e5c7c08 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.074301] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924172b2-9f8f-4c54-baed-be689f8299bc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.105592] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180711MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 991.105834] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.106157] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.198618] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1323e67f-17c6-4432-8eea-98c285745766 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.198874] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.198943] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e41d1a8c-ad7e-4151-9745-04318b007dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
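A quick cross-check of the accounting in this audit: ten instances are tracked, each with the m1.nano footprint shown in these allocation entries (DISK_GB=1, MEMORY_MB=128, VCPU=1), and the inventory further below reserves 512 MB of host RAM. Those numbers reproduce the final resource view reported a few entries later:

```python
instances = 10                        # "total allocated vcpus: 10", one per instance
used_ram  = 512 + instances * 128     # 512 MB reserved + 10 x 128 MB = 1792 MB
used_disk = instances * 1             # 10 x 1 GB root disk = 10 GB
print(used_ram, used_disk)            # matches "used_ram=1792MB ... used_disk=10GB"
```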
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.199058] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.199201] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.199417] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.199506] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.199555] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.199684] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.199823] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 991.215234] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.226441] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5169fc76-eb51-45f0-9f19-737fb3213125 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.239681] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 31afc3bf-67c8-481a-9413-e69b5d6bf74f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.250991] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 92e0c9fd-582c-4118-b7e0-0fb822b1c38e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.262532] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.274031] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3295b0cb-15d5-4008-bc76-95b69f2f40a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.284208] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7aab03db-43b4-4884-bc20-0a29058ea2ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.294643] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ade4daab-4b02-4664-b745-a0c799d8415d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.304872] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cd2482db-1c9e-4b1a-bb79-b7250cb863a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.314798] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2c553a0b-1f8b-42aa-8b64-e22bc3cac45e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.326877] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 02b24610-323b-47b0-9c3e-f397cb48835e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.336465] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4439f302-8fa4-452a-97d8-4d6c1fef36d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.346957] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1241b06a-696d-4f96-961e-95129b1ba674 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.358029] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4be38fcc-5fa9-43b3-ab33-544812082b2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.368799] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e1c8d6f6-b179-4e47-ac13-9abeb84e5a53 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
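The inventory reported just below (VCPU total=48 with allocation_ratio 4.0, MEMORY_MB total=196590 with 512 reserved, DISK_GB total=200) determines placement's effective capacity as (total - reserved) x allocation_ratio; a single instance is further capped by max_unit (16 vCPUs, 65530 MB, 97 GB here). A small worked check of that formula:

```python
def capacity(total, reserved=0.0, allocation_ratio=1.0):
    # placement's effective capacity for one resource class
    return int((total - reserved) * allocation_ratio)

print(capacity(48, 0, 4.0))         # VCPU      -> 192 schedulable vCPUs
print(capacity(196590, 512, 1.0))   # MEMORY_MB -> 196078 MB
print(capacity(200, 0, 1.0))        # DISK_GB   -> 200 GB
```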
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.379816] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 991.380082] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 991.380235] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 991.713018] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b08e0c-87af-48fb-afa1-982cdca441bb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.719112] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2169876b-6647-42f4-a806-ed364971803f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.749043] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af421be-84f6-4b28-b56a-695a852bfa89 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.758415] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95161eee-c2aa-460a-9044-021eead83761 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.773466] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.785625] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.802480] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 991.802981] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.697s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.804060] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.804446] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 993.027625] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.026757] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.569489] env[62476]: WARNING oslo_vmware.rw_handles [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1017.569489] env[62476]: ERROR oslo_vmware.rw_handles [ 1017.570045] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to 
vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1017.572075] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1017.572376] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Copying Virtual Disk [datastore1] vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/abe541a2-d48f-4cd0-811f-df539721478c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1017.572700] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23a937c9-1a89-449b-b5e8-f1cadba64baf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.581164] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Waiting for the task: (returnval){ [ 1017.581164] env[62476]: value = "task-4319077" [ 1017.581164] env[62476]: _type = "Task" [ 1017.581164] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.589673] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Task: {'id': task-4319077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.091955] env[62476]: DEBUG oslo_vmware.exceptions [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Fault InvalidArgument not matched. 
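"Fault InvalidArgument not matched." is the fault-translation step: oslo.vmware looks the fault name up in its registry of specific exception classes and, finding no dedicated class, falls back to a generic VimFaultException, which is what the traceback below then raises. A sketch of that lookup (the registry contents and class shapes here are illustrative, not the exact oslo.vmware hierarchy):

```python
class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

_FAULT_CLASSES = {
    # "FileAlreadyExists": FileAlreadyExistsException, ...  (specific faults)
}

def translate_fault(fault_name, message):
    cls = _FAULT_CLASSES.get(fault_name)
    if cls is None:
        print("Fault %s not matched." % fault_name)   # the DEBUG line above
        return VimFaultException([fault_name], message)
    return cls(message)

exc = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
print(exc.fault_list, exc)
```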
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1018.092191] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.092770] env[62476]: ERROR nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1018.092770] env[62476]: Faults: ['InvalidArgument'] [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] Traceback (most recent call last): [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] yield resources [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self.driver.spawn(context, instance, image_meta, [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self._fetch_image_if_missing(context, vi) [ 1018.092770] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] image_cache(vi, tmp_image_ds_loc) [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] vm_util.copy_virtual_disk( [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] session._wait_for_task(vmdk_copy_task) [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] return self.wait_for_task(task_ref) [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] return evt.wait() [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] result = hub.switch() [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1018.093073] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] return self.greenlet.switch() [ 1018.093416] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1018.093416] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self.f(*self.args, **self.kw) [ 1018.093416] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1018.093416] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] raise exceptions.translate_fault(task_info.error) [ 1018.093416] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1018.093416] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] Faults: ['InvalidArgument'] [ 1018.093416] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] [ 1018.093416] env[62476]: INFO nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Terminating instance [ 1018.094720] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.094949] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.095208] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3705268a-1714-46f7-bb38-7efaa38a2fbc 
{{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.097462] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1018.097663] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1018.098472] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebcc537-76f0-4d6c-a8bc-7035c1d6933c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.105687] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1018.105923] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccc3ffb1-5ab2-4127-a5a6-dcf42e3ac348 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.108294] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.108467] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1018.109433] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62232735-d72b-4e36-beaf-3ff532d9590b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.114730] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Waiting for the task: (returnval){ [ 1018.114730] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]520489f0-2c2c-34ed-21c5-1880b8136e07" [ 1018.114730] env[62476]: _type = "Task" [ 1018.114730] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.129923] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1018.130206] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Creating directory with path [datastore1] vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.130488] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86907786-b309-4b96-ae42-1f737b35a853 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.152931] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Created directory with path [datastore1] vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.153303] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Fetch image to [datastore1] vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1018.153600] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1018.154816] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdef080-a708-4e7b-b51b-89764ddd7ce2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.162538] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513b9e15-00a4-465a-aad6-9c62e11ba5ad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.172380] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5528cb1d-536e-4bc2-8b4a-3f96b996267b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.204722] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-193e760e-602b-43d7-a904-d8c1346a915e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.207391] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1018.207590] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1018.207769] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Deleting the datastore file [datastore1] 1323e67f-17c6-4432-8eea-98c285745766 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1018.208015] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-972971f9-fcd3-414b-a0a4-aff8520f4b95 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.213569] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-92053f72-e159-445a-ad46-afb9c87e5d61 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.216575] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Waiting for the task: (returnval){ [ 1018.216575] env[62476]: value = "task-4319079" [ 1018.216575] env[62476]: _type = "Task" [ 1018.216575] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.224672] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Task: {'id': task-4319079, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.238834] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1018.297172] env[62476]: DEBUG oslo_vmware.rw_handles [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1018.359491] env[62476]: DEBUG oslo_vmware.rw_handles [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1018.359695] env[62476]: DEBUG oslo_vmware.rw_handles [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1018.727192] env[62476]: DEBUG oslo_vmware.api [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Task: {'id': task-4319079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080456} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.727520] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.727651] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1018.727803] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1018.727984] env[62476]: INFO nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1018.730191] env[62476]: DEBUG nova.compute.claims [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1018.730367] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.730579] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.117875] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f056a478-7bdd-4897-b897-c44b693b01e4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.126027] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888979e9-accd-4b63-bdc2-45819586884b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.158678] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f324a5e0-ae10-4d9e-a274-0fe84e9e38f7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.167668] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04e001b-140b-47f4-9a6c-e85d20cf5ee1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.182244] env[62476]: DEBUG nova.compute.provider_tree [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.193884] env[62476]: DEBUG nova.scheduler.client.report [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1019.214079] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.483s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.214492] env[62476]: ERROR nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1019.214492] env[62476]: Faults: ['InvalidArgument'] [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] Traceback (most recent call last): [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self.driver.spawn(context, instance, image_meta, [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self._fetch_image_if_missing(context, vi) [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1019.214492] 
env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] image_cache(vi, tmp_image_ds_loc) [ 1019.214492] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] vm_util.copy_virtual_disk( [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] session._wait_for_task(vmdk_copy_task) [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] return self.wait_for_task(task_ref) [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] return evt.wait() [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] result = hub.switch() [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] return self.greenlet.switch() [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1019.215086] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] self.f(*self.args, **self.kw) [ 1019.215572] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1019.215572] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] raise exceptions.translate_fault(task_info.error) [ 1019.215572] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1019.215572] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] Faults: ['InvalidArgument'] [ 1019.215572] env[62476]: ERROR nova.compute.manager [instance: 1323e67f-17c6-4432-8eea-98c285745766] [ 1019.215572] env[62476]: DEBUG nova.compute.utils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] VimFaultException {{(pid=62476) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1019.216895] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Build of instance 1323e67f-17c6-4432-8eea-98c285745766 was re-scheduled: A specified parameter was not correct: fileType [ 1019.216895] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1019.217307] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1019.217486] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1019.217661] env[62476]: DEBUG nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1019.217826] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1019.627219] env[62476]: DEBUG nova.network.neutron [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.641548] env[62476]: INFO nova.compute.manager [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Took 0.42 seconds to deallocate network for instance. 
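The spawn failure above originates in vCenter rejecting the CopyVirtualDisk_Task that nova issues to turn the downloaded tmp-sparse.vmdk into the flat cache image: the task completes in error with fault InvalidArgument ("A specified parameter was not correct: fileType"), and oslo.vmware's task poller translates that fault into VimFaultException, which is the exception seen unwinding through _fetch_image_if_missing in both tracebacks. A minimal sketch of that copy-and-wait pattern, assuming oslo.vmware is installed and `session` is an already-established oslo_vmware.api.VMwareAPISession (the datastore paths below are placeholders, not the temp paths from this log):

    from oslo_vmware import exceptions as vmware_exc

    def copy_sparse_disk(session, source_path, dest_path):
        """Start CopyVirtualDisk_Task and block until it finishes."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=source_path,  # e.g. '[datastore1] tmp/disk.vmdk'
            destName=dest_path)
        # wait_for_task() polls the task (the "progress is 0%." lines
        # above); if the task ends in error, the vCenter fault is
        # translated and raised, with the fault names (here
        # ['InvalidArgument']) carried on the exception's fault_list.
        return session.wait_for_task(task)

    # Hypothetical usage mirroring the log's failure handling:
    # try:
    #     copy_sparse_disk(session, src, dst)
    # except vmware_exc.VimFaultException as err:
    #     print(err.fault_list, err.msg)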
[ 1019.754078] env[62476]: INFO nova.scheduler.client.report [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Deleted allocations for instance 1323e67f-17c6-4432-8eea-98c285745766 [ 1019.775941] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6b05be3b-25f3-458f-9f8f-30e38595748b tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "1323e67f-17c6-4432-8eea-98c285745766" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 374.474s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.777175] env[62476]: DEBUG oslo_concurrency.lockutils [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "1323e67f-17c6-4432-8eea-98c285745766" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 176.029s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.777501] env[62476]: DEBUG oslo_concurrency.lockutils [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Acquiring lock "1323e67f-17c6-4432-8eea-98c285745766-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.777874] env[62476]: DEBUG oslo_concurrency.lockutils [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "1323e67f-17c6-4432-8eea-98c285745766-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.778581] env[62476]: DEBUG oslo_concurrency.lockutils [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "1323e67f-17c6-4432-8eea-98c285745766-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.780823] env[62476]: INFO nova.compute.manager [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Terminating instance [ 1019.782565] env[62476]: DEBUG nova.compute.manager [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1019.782768] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1019.783588] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58b99995-47b0-4c71-8017-b57e9cb6f472 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.790244] env[62476]: DEBUG nova.compute.manager [None req-49906688-f7e6-4ae4-85ce-7e4c74c37210 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: 4a9416ca-21ad-42eb-9ffd-a0009d6d96a3] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1019.797539] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c12bbbf-539b-4413-ae64-1fcf84ffdf12 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.816018] env[62476]: DEBUG nova.compute.manager [None req-49906688-f7e6-4ae4-85ce-7e4c74c37210 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: 4a9416ca-21ad-42eb-9ffd-a0009d6d96a3] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1019.831188] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1323e67f-17c6-4432-8eea-98c285745766 could not be found. [ 1019.831405] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1019.831582] env[62476]: INFO nova.compute.manager [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1019.831829] env[62476]: DEBUG oslo.service.loopingcall [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1019.832374] env[62476]: DEBUG nova.compute.manager [-] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1019.832477] env[62476]: DEBUG nova.network.neutron [-] [instance: 1323e67f-17c6-4432-8eea-98c285745766] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1019.849416] env[62476]: DEBUG oslo_concurrency.lockutils [None req-49906688-f7e6-4ae4-85ce-7e4c74c37210 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "4a9416ca-21ad-42eb-9ffd-a0009d6d96a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.542s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.864154] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1019.867553] env[62476]: DEBUG nova.network.neutron [-] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.878066] env[62476]: INFO nova.compute.manager [-] [instance: 1323e67f-17c6-4432-8eea-98c285745766] Took 0.04 seconds to deallocate network for instance. 
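The "Acquiring lock" / "acquired ... waited" / '"released" ... held' triplets that bracket the claim-abort and terminate paths above are emitted by oslo.concurrency, which wraps the decorated function and logs wait and hold times around the named lock. A minimal sketch of that pattern, assuming plain oslo.concurrency usage (the lock and function names mirror the log but are illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # The body runs with the named in-process lock held; lockutils
        # logs the "Acquiring lock ...", "Lock ... acquired :: waited
        # Ns" and '"released" ... held Ns' DEBUG records seen above.
        pass

    abort_instance_claim()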
[ 1019.931519] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.931764] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.933382] env[62476]: INFO nova.compute.claims [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.026690] env[62476]: DEBUG oslo_concurrency.lockutils [None req-25255a71-d88b-4527-9b3c-07861d149202 tempest-ImagesNegativeTestJSON-697283829 tempest-ImagesNegativeTestJSON-697283829-project-member] Lock "1323e67f-17c6-4432-8eea-98c285745766" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.249s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.346627] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47339f76-4ccd-4580-be15-01fd9d184a97 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.354681] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e906bb29-ed84-4db6-8710-e6283423fa64 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.384410] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5790d1-bbb9-402c-8939-1d831b793dcf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.391808] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ace676-9363-4c2f-ab85-1a491bbc5d36 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.405812] env[62476]: DEBUG nova.compute.provider_tree [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.414263] env[62476]: DEBUG nova.scheduler.client.report [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.430862] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.499s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.431373] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1020.467707] env[62476]: DEBUG nova.compute.utils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1020.469132] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1020.469347] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1020.479516] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1020.555248] env[62476]: DEBUG nova.policy [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10dc9791cc96471c926e4eb8e1129b2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cdbe9b66c724475a673e94fdb118821', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1020.583757] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1020.621396] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1020.622204] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1020.622204] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.622204] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1020.622204] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.622437] 
env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1020.622523] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1020.622689] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1020.622855] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1020.623025] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1020.623206] env[62476]: DEBUG nova.virt.hardware [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1020.624152] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a3e05b-3f93-43e9-bd57-28e71ec93633 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.633583] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eccf705-823b-46f8-a67b-d199ba8abe7b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.983458] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Successfully created port: e358b3ca-abae-45e8-a676-39380b422af4 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1021.898154] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Successfully updated port: e358b3ca-abae-45e8-a676-39380b422af4 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.918557] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "refresh_cache-87f2ddc2-11d2-49de-a3de-9e7082ab88c4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.918742] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "refresh_cache-87f2ddc2-11d2-49de-a3de-9e7082ab88c4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.919086] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1021.984676] env[62476]: DEBUG nova.compute.manager [req-b2226bbc-2edb-4651-95ad-b1cc2aec5642 req-a5337bac-d1a8-4a4c-8848-a24b95ba8210 service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Received event network-vif-plugged-e358b3ca-abae-45e8-a676-39380b422af4 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1021.984950] env[62476]: DEBUG oslo_concurrency.lockutils [req-b2226bbc-2edb-4651-95ad-b1cc2aec5642 req-a5337bac-d1a8-4a4c-8848-a24b95ba8210 service nova] Acquiring lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.985115] env[62476]: DEBUG oslo_concurrency.lockutils [req-b2226bbc-2edb-4651-95ad-b1cc2aec5642 req-a5337bac-d1a8-4a4c-8848-a24b95ba8210 service nova] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.985284] env[62476]: DEBUG oslo_concurrency.lockutils [req-b2226bbc-2edb-4651-95ad-b1cc2aec5642 req-a5337bac-d1a8-4a4c-8848-a24b95ba8210 service nova] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.985451] env[62476]: DEBUG nova.compute.manager [req-b2226bbc-2edb-4651-95ad-b1cc2aec5642 req-a5337bac-d1a8-4a4c-8848-a24b95ba8210 service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] No waiting events found dispatching network-vif-plugged-e358b3ca-abae-45e8-a676-39380b422af4 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1021.985617] env[62476]: WARNING nova.compute.manager [req-b2226bbc-2edb-4651-95ad-b1cc2aec5642 req-a5337bac-d1a8-4a4c-8848-a24b95ba8210 service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Received unexpected event network-vif-plugged-e358b3ca-abae-45e8-a676-39380b422af4 for instance with vm_state building and task_state spawning. 
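The network-vif-plugged traffic above is Neutron calling back into Nova: when the port becomes active, Neutron posts an external event to Nova's os-server-external-events API, and the compute manager pops it against any waiter registered for that instance. Here the VIF plug landed before the driver registered a waiter, so the event is logged as unexpected for an instance still in vm_state building and discarded, which is what the WARNING records. A sketch of the delivery side, with every endpoint and credential value a placeholder:

    import json
    import urllib.request

    NOVA = 'http://nova.example.test/v2.1'  # placeholder endpoint

    payload = {'events': [{
        'name': 'network-vif-plugged',
        'server_uuid': '87f2ddc2-11d2-49de-a3de-9e7082ab88c4',
        'tag': 'e358b3ca-abae-45e8-a676-39380b422af4',  # the port id
        'status': 'completed',
    }]}
    request = urllib.request.Request(
        NOVA + '/os-server-external-events',
        data=json.dumps(payload).encode('utf-8'),
        headers={'Content-Type': 'application/json',
                 'X-Auth-Token': 'PLACEHOLDER'},
        method='POST')
    # urllib.request.urlopen(request)  # requires a live deployment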
[ 1022.028240] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1022.632570] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Updating instance_info_cache with network_info: [{"id": "e358b3ca-abae-45e8-a676-39380b422af4", "address": "fa:16:3e:98:2b:62", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape358b3ca-ab", "ovs_interfaceid": "e358b3ca-abae-45e8-a676-39380b422af4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.648016] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "refresh_cache-87f2ddc2-11d2-49de-a3de-9e7082ab88c4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.648016] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Instance network_info: |[{"id": "e358b3ca-abae-45e8-a676-39380b422af4", "address": "fa:16:3e:98:2b:62", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tape358b3ca-ab", "ovs_interfaceid": "e358b3ca-abae-45e8-a676-39380b422af4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1022.648196] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:2b:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24376631-ee89-4ff1-b8ac-f09911fc8329', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e358b3ca-abae-45e8-a676-39380b422af4', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.657669] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating folder: Project (8cdbe9b66c724475a673e94fdb118821). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1022.658494] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23c6c320-98bb-4d4a-b171-9a5819a584aa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.670418] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created folder: Project (8cdbe9b66c724475a673e94fdb118821) in parent group-v849485. [ 1022.670638] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating folder: Instances. Parent ref: group-v849537. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1022.670902] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c032fbc7-41f4-425a-afcd-f04136fa1f27 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.680401] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created folder: Instances in parent group-v849537. [ 1022.680646] env[62476]: DEBUG oslo.service.loopingcall [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.680832] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1022.681042] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5603c40d-e510-4978-9516-2eda1f5cd63a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.705753] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.705753] env[62476]: value = "task-4319082" [ 1022.705753] env[62476]: _type = "Task" [ 1022.705753] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.715442] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319082, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.232095] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319082, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.734032] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319082, 'name': CreateVM_Task, 'duration_secs': 0.556218} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.734372] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1023.735192] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1023.735936] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.736375] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1023.736750] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16a95dee-7898-4b0f-8457-c05ad304088a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.742309] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: 
(returnval){ [ 1023.742309] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52ee6006-e8f2-3aa5-3ac2-62ed3f064b37" [ 1023.742309] env[62476]: _type = "Task" [ 1023.742309] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.756962] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52ee6006-e8f2-3aa5-3ac2-62ed3f064b37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.257326] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.257326] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.257627] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.293969] env[62476]: DEBUG nova.compute.manager [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Received event network-changed-e358b3ca-abae-45e8-a676-39380b422af4 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1024.294185] env[62476]: DEBUG nova.compute.manager [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Refreshing instance network info cache due to event network-changed-e358b3ca-abae-45e8-a676-39380b422af4. 
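The instance_info_cache records above (and the refreshed VIF entry below) serialize each VIF as a JSON-like dict; the devname, MAC address, and fixed-IP fields Nova logs all come straight out of that structure. A short sketch pulling those fields from one entry, trimmed down from the blob exactly as logged:

```python
# Sketch: extracting the fields Nova logs from one network_info entry,
# using the structure as it appears in the cache-update records above.
vif = {
    "id": "e358b3ca-abae-45e8-a676-39380b422af4",
    "address": "fa:16:3e:98:2b:62",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "devname": "tape358b3ca-ab",
}

fixed_ips = [
    ip["address"]
    for subnet in vif["network"]["subnets"]
    for ip in subnet["ips"]
    if ip["type"] == "fixed"
]
# -> tape358b3ca-ab fa:16:3e:98:2b:62 ['192.168.128.12']
print(vif["devname"], vif["address"], fixed_ips)
```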
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1024.294411] env[62476]: DEBUG oslo_concurrency.lockutils [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] Acquiring lock "refresh_cache-87f2ddc2-11d2-49de-a3de-9e7082ab88c4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.294578] env[62476]: DEBUG oslo_concurrency.lockutils [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] Acquired lock "refresh_cache-87f2ddc2-11d2-49de-a3de-9e7082ab88c4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.294770] env[62476]: DEBUG nova.network.neutron [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Refreshing network info cache for port e358b3ca-abae-45e8-a676-39380b422af4 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1024.706562] env[62476]: DEBUG nova.network.neutron [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Updated VIF entry in instance network info cache for port e358b3ca-abae-45e8-a676-39380b422af4. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1024.706865] env[62476]: DEBUG nova.network.neutron [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Updating instance_info_cache with network_info: [{"id": "e358b3ca-abae-45e8-a676-39380b422af4", "address": "fa:16:3e:98:2b:62", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape358b3ca-ab", "ovs_interfaceid": "e358b3ca-abae-45e8-a676-39380b422af4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.717165] env[62476]: DEBUG oslo_concurrency.lockutils [req-3325b3ba-2648-4ae0-adb3-f286ad9634d7 req-8a19d804-656c-4814-8b03-b837519a5cdd service nova] Releasing lock "refresh_cache-87f2ddc2-11d2-49de-a3de-9e7082ab88c4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.991504] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] 
Acquiring lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.991816] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.781814] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.179964] env[62476]: DEBUG oslo_concurrency.lockutils [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "d7ec9b10-5975-4148-9931-3e7b0999b373" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.333702] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.334038] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.971286] env[62476]: DEBUG oslo_concurrency.lockutils [None req-02a346b9-a13a-499c-a526-a8a3bfd75b23 tempest-InstanceActionsNegativeTestJSON-1079402422 tempest-InstanceActionsNegativeTestJSON-1079402422-project-member] Acquiring lock "f1e79622-36ec-4efa-9b19-d5aeb1b9d57b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.971545] env[62476]: DEBUG oslo_concurrency.lockutils [None req-02a346b9-a13a-499c-a526-a8a3bfd75b23 tempest-InstanceActionsNegativeTestJSON-1079402422 tempest-InstanceActionsNegativeTestJSON-1079402422-project-member] Lock "f1e79622-36ec-4efa-9b19-d5aeb1b9d57b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.029398] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.030331] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1051.030331] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.051141] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.051835] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.052086] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.052912] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1051.053701] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530897cd-9100-4d8b-a79f-d483adcda92b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.062875] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb289fc8-7db6-4622-a477-8b81ce653117 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.081162] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f5e046-b839-47d5-8ac6-467c37116c4e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.089801] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e23254-2680-4480-8158-7fb27ebc3364 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.120940] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180723MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1051.121119] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.121321] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.233033] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.233224] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e41d1a8c-ad7e-4151-9745-04318b007dfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.233355] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.233481] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.233604] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.233720] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.233832] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.233978] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.234119] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.234231] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1051.248483] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5169fc76-eb51-45f0-9f19-737fb3213125 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.262385] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 31afc3bf-67c8-481a-9413-e69b5d6bf74f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.275520] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 92e0c9fd-582c-4118-b7e0-0fb822b1c38e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.287347] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.304537] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3295b0cb-15d5-4008-bc76-95b69f2f40a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.317350] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7aab03db-43b4-4884-bc20-0a29058ea2ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.332490] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ade4daab-4b02-4664-b745-a0c799d8415d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.345806] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cd2482db-1c9e-4b1a-bb79-b7250cb863a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.356848] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2c553a0b-1f8b-42aa-8b64-e22bc3cac45e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.369682] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 02b24610-323b-47b0-9c3e-f397cb48835e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.387934] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4439f302-8fa4-452a-97d8-4d6c1fef36d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.400778] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1241b06a-696d-4f96-961e-95129b1ba674 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.412421] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4be38fcc-5fa9-43b3-ab33-544812082b2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.423540] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e1c8d6f6-b179-4e47-ac13-9abeb84e5a53 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.434067] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.446125] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.459621] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.474251] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f1e79622-36ec-4efa-9b19-d5aeb1b9d57b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1051.474513] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1051.474661] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1051.900509] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c496ad-8f03-4ac7-a9e5-a23cacf2c3e8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.911582] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a0c40e-323f-42da-af4a-64073fe528dc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.946146] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced6eaaa-3848-4722-8336-8488193f09ae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.954669] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40400feb-2ca3-4300-b83e-2f6bc811974b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.973127] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.982933] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.007378] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1052.007576] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.886s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.007645] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.007645] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1053.007645] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1053.037514] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.037514] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.037514] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.038010] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.038516] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.038820] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.039099] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.039374] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. 
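The update_available_resource audit in the records above counts ten actively managed instances, each holding a placement allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and then reports used_ram=1792MB used_disk=10GB used_vcpus=10. The numbers reconcile once the 512 MB reserved memory from the inventory record is added in; a quick check:

```python
# Cross-checking the resource tracker's final view from the audit above:
# ten allocations of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} plus the
# 512 MB reserved memory shown in the MEMORY_MB inventory data.
allocations = [{"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}] * 10
reserved_mb = 512

used_ram = reserved_mb + sum(a["MEMORY_MB"] for a in allocations)   # 512 + 1280
used_disk = sum(a["DISK_GB"] for a in allocations)                  # 10
used_vcpus = sum(a["VCPU"] for a in allocations)                    # 10

assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)  # matches the log
```

The scheduled-but-not-yet-started instances are skipped for allocation healing and do not appear in the used totals, which is consistent with "total allocated vcpus: 10" in the final resource view.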
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.039624] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.040330] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1053.040598] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1053.041849] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.041849] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.041849] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.059615] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.027436] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1055.027750] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1056.022922] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.038722] env[62476]: DEBUG oslo_concurrency.lockutils [None req-fa9d7499-add2-456f-99d9-5a05c3a1b094 tempest-ServersTestManualDisk-1677104424 tempest-ServersTestManualDisk-1677104424-project-member] Acquiring lock "a18ae56f-62d1-407e-bc7e-47907857e6b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1062.038993] 
env[62476]: DEBUG oslo_concurrency.lockutils [None req-fa9d7499-add2-456f-99d9-5a05c3a1b094 tempest-ServersTestManualDisk-1677104424 tempest-ServersTestManualDisk-1677104424-project-member] Lock "a18ae56f-62d1-407e-bc7e-47907857e6b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.901387] env[62476]: WARNING oslo_vmware.rw_handles [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1064.901387] env[62476]: ERROR oslo_vmware.rw_handles [ 1064.901990] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1064.904324] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1064.904324] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Copying Virtual Disk [datastore1] vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/985d91b8-b6ad-4a30-b881-2af8cab6ad4e/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1064.904507] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e1307a8-7ce1-4375-9d70-7cd129d4a71a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.912896] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Waiting for the task: (returnval){ [ 1064.912896] env[62476]: value = "task-4319083" [ 1064.912896] env[62476]: _type = "Task" [ 1064.912896] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.921458] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Task: {'id': task-4319083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.424665] env[62476]: DEBUG oslo_vmware.exceptions [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1065.424957] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1065.425529] env[62476]: ERROR nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1065.425529] env[62476]: Faults: ['InvalidArgument'] [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Traceback (most recent call last): [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] yield resources [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self.driver.spawn(context, instance, image_meta, [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self._fetch_image_if_missing(context, vi) [ 1065.425529] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] image_cache(vi, tmp_image_ds_loc) [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] vm_util.copy_virtual_disk( [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] session._wait_for_task(vmdk_copy_task) [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] return self.wait_for_task(task_ref) [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] return evt.wait() [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] result = hub.switch() [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1065.425868] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] return self.greenlet.switch() [ 1065.426198] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1065.426198] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self.f(*self.args, **self.kw) [ 1065.426198] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1065.426198] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] raise exceptions.translate_fault(task_info.error) [ 1065.426198] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1065.426198] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Faults: ['InvalidArgument'] [ 1065.426198] 
env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa]
[ 1065.426198] env[62476]: INFO nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Terminating instance
[ 1065.427470] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1065.427703] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1065.428316] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1065.428514] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1065.428745] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d38bf636-c72e-45c4-9373-c995bd03fd95 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1065.431138] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbbbabf-e51c-44e1-b67b-bb1e99896e73 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1065.438358] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1065.438586] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc62514f-e5e6-4914-b6ca-5879493176e8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1065.442554] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1065.442730] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1065.443415] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0781b1e-02af-4a65-91a6-dff19f49a8fd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1065.448461] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){
[ 1065.448461] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5228a317-90c3-0d1e-fcb7-0c570c604c94"
[ 1065.448461] env[62476]: _type = "Task"
[ 1065.448461] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1065.456221] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5228a317-90c3-0d1e-fcb7-0c570c604c94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1065.960156] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1065.960432] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating directory with path [datastore1] vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1065.960671] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63241f34-3372-4623-a440-fef4e2712dff {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1065.984239] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Created directory with path [datastore1] vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1065.984538] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Fetch image to [datastore1] vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1065.984720] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1065.985619] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73686029-aee4-420b-b682-db9377c99643 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1065.993908] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90592b24-ccad-4fab-b9ed-20abc3c9cfb4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.004887] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd366ba-2a02-468f-8b94-ebd7d1fa838c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.040165] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e10157-b501-41f7-94ea-cecb6b35d820 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.047778] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1b716bfd-73d3-414d-a97f-3de1104e0628 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.071780] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1066.135469] env[62476]: DEBUG oslo_vmware.rw_handles [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1066.199709] env[62476]: DEBUG oslo_vmware.rw_handles [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1066.199709] env[62476]: DEBUG oslo_vmware.rw_handles [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1066.609902] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1066.610041] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1066.610633] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Deleting the datastore file [datastore1] e41d1a8c-ad7e-4151-9745-04318b007dfa {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1066.610633] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8164f54-8df0-4699-9921-c34dcbece95a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1066.622084] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Waiting for the task: (returnval){
[ 1066.622084] env[62476]: value = "task-4319085"
[ 1066.622084] env[62476]: _type = "Task"
[ 1066.622084] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1066.629115] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Task: {'id': task-4319085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1067.131291] env[62476]: DEBUG oslo_vmware.api [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Task: {'id': task-4319085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091919} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1067.131564] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1067.131734] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1067.131904] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1067.132100] env[62476]: INFO nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Took 1.70 seconds to destroy the instance on the hypervisor.
[ 1067.134342] env[62476]: DEBUG nova.compute.claims [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1067.134519] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1067.134986] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1067.623125] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7310cc5-cf62-424b-bbd1-89bc273d90e6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.632138] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbe0582-84ef-44cb-9138-bcb089d81459 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.670020] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1edaddd-6fe5-462a-99cf-1444fd6c9e32 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.683022] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8594bd69-e172-416d-91e0-18ee31a94af9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1067.695997] env[62476]: DEBUG nova.compute.provider_tree [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1067.712028] env[62476]: DEBUG nova.scheduler.client.report [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1067.728935] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.594s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1067.729503] env[62476]: ERROR nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1067.729503] env[62476]: Faults: ['InvalidArgument']
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Traceback (most recent call last):
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self.driver.spawn(context, instance, image_meta,
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self._fetch_image_if_missing(context, vi)
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] image_cache(vi, tmp_image_ds_loc)
[ 1067.729503] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] vm_util.copy_virtual_disk(
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] session._wait_for_task(vmdk_copy_task)
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] return self.wait_for_task(task_ref)
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] return evt.wait()
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] result = hub.switch()
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] return self.greenlet.switch()
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1067.729845] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] self.f(*self.args, **self.kw)
[ 1067.730205] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1067.730205] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] raise exceptions.translate_fault(task_info.error)
[ 1067.730205] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1067.730205] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Faults: ['InvalidArgument']
[ 1067.730205] env[62476]: ERROR nova.compute.manager [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa]
[ 1067.730348] env[62476]: DEBUG nova.compute.utils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1067.731821] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Build of instance e41d1a8c-ad7e-4151-9745-04318b007dfa was re-scheduled: A specified parameter was not correct: fileType
[ 1067.731821] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1067.732327] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1067.732506] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1067.732662] env[62476]: DEBUG nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1067.732884] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1068.830170] env[62476]: DEBUG nova.network.neutron [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1068.848132] env[62476]: INFO nova.compute.manager [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Took 1.11 seconds to deallocate network for instance.
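Editor's note: the traceback above is the heart of this failure episode. The CopyVirtualDisk_Task issued while caching the sparse image errored out on the vCenter side ("A specified parameter was not correct: fileType", fault InvalidArgument), and the frame at oslo_vmware/api.py:448 shows the generic pattern by which that server-side fault becomes a Python exception: a looping call polls the task info and, on the error state, raises exceptions.translate_fault(task_info.error). The self-contained Python sketch below mirrors that polling pattern only; the class and helper names here are illustrative assumptions for this note, not oslo.vmware's actual implementation.

    import time

    class VimFaultException(Exception):
        """Carries the fault names alongside the message, as in the log above."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def translate_fault(error):
        # The real library maps specific vSphere fault types to exception
        # classes; this stub always builds the generic VimFaultException.
        return VimFaultException(error.get("faults", []), error["message"])

    def wait_for_task(poll_task, interval=0.5):
        # poll_task stands in for a PropertyCollector read of the TaskInfo;
        # it returns a dict like {"state": "running"|"success"|"error", ...}.
        while True:
            info = poll_task()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # The step corresponding to the log's api.py:448:
                #     raise exceptions.translate_fault(task_info.error)
                raise translate_fault(info["error"])
            time.sleep(interval)

    # The vmdk copy task in this run ended in the error state, so the
    # poller raised instead of returning; a stub reproduces the outcome:
    def failed_copy_poll():
        return {"state": "error",
                "error": {"message": "A specified parameter was not correct: fileType",
                          "faults": ["InvalidArgument"]}}

    try:
        wait_for_task(failed_copy_poll)
    except VimFaultException as exc:
        print(f"{exc} / Faults: {exc.fault_list}")

Because _do_build_and_run_instance treats the raised exception as a build failure, the resource claim is aborted and the instance is rescheduled, which is exactly what the records before and after this note show.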
[ 1069.006569] env[62476]: INFO nova.scheduler.client.report [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Deleted allocations for instance e41d1a8c-ad7e-4151-9745-04318b007dfa [ 1069.032268] env[62476]: DEBUG oslo_concurrency.lockutils [None req-73b6c917-b4ff-48dc-8752-cf598e7c6bf0 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "e41d1a8c-ad7e-4151-9745-04318b007dfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 422.540s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.032268] env[62476]: DEBUG oslo_concurrency.lockutils [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "e41d1a8c-ad7e-4151-9745-04318b007dfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 223.080s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.032268] env[62476]: DEBUG oslo_concurrency.lockutils [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Acquiring lock "e41d1a8c-ad7e-4151-9745-04318b007dfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.032601] env[62476]: DEBUG oslo_concurrency.lockutils [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "e41d1a8c-ad7e-4151-9745-04318b007dfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.032751] env[62476]: DEBUG oslo_concurrency.lockutils [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "e41d1a8c-ad7e-4151-9745-04318b007dfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.035384] env[62476]: INFO nova.compute.manager [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Terminating instance [ 1069.037169] env[62476]: DEBUG nova.compute.manager [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1069.037372] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1069.037979] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6935fe4-7bcc-4322-89fd-bdb23bd739d1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.048962] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7824327c-30fa-4271-adb9-343867dfa8ca {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.065710] env[62476]: DEBUG nova.compute.manager [None req-3f808bd9-50bd-474a-9bcd-3ff9cacd86dc tempest-ServersTestJSON-2046354372 tempest-ServersTestJSON-2046354372-project-member] [instance: 5169fc76-eb51-45f0-9f19-737fb3213125] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1069.088025] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e41d1a8c-ad7e-4151-9745-04318b007dfa could not be found. [ 1069.088025] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1069.088025] env[62476]: INFO nova.compute.manager [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1069.088178] env[62476]: DEBUG oslo.service.loopingcall [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1069.088326] env[62476]: DEBUG nova.compute.manager [-] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1069.088518] env[62476]: DEBUG nova.network.neutron [-] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1069.098861] env[62476]: DEBUG nova.compute.manager [None req-3f808bd9-50bd-474a-9bcd-3ff9cacd86dc tempest-ServersTestJSON-2046354372 tempest-ServersTestJSON-2046354372-project-member] [instance: 5169fc76-eb51-45f0-9f19-737fb3213125] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1069.132618] env[62476]: DEBUG nova.network.neutron [-] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.134357] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f808bd9-50bd-474a-9bcd-3ff9cacd86dc tempest-ServersTestJSON-2046354372 tempest-ServersTestJSON-2046354372-project-member] Lock "5169fc76-eb51-45f0-9f19-737fb3213125" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.663s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.143548] env[62476]: INFO nova.compute.manager [-] [instance: e41d1a8c-ad7e-4151-9745-04318b007dfa] Took 0.05 seconds to deallocate network for instance. [ 1069.151820] env[62476]: DEBUG nova.compute.manager [None req-9d62f1e0-dce6-49b6-86c5-d1c1cebe383f tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] [instance: 31afc3bf-67c8-481a-9413-e69b5d6bf74f] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1069.182921] env[62476]: DEBUG nova.compute.manager [None req-9d62f1e0-dce6-49b6-86c5-d1c1cebe383f tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] [instance: 31afc3bf-67c8-481a-9413-e69b5d6bf74f] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1069.226712] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d62f1e0-dce6-49b6-86c5-d1c1cebe383f tempest-MigrationsAdminTest-1701507521 tempest-MigrationsAdminTest-1701507521-project-member] Lock "31afc3bf-67c8-481a-9413-e69b5d6bf74f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.420s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.263015] env[62476]: DEBUG nova.compute.manager [None req-e47f7d9b-0846-4022-b0d4-f184daa30b13 tempest-ServerActionsTestOtherA-421018234 tempest-ServerActionsTestOtherA-421018234-project-member] [instance: 92e0c9fd-582c-4118-b7e0-0fb822b1c38e] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1069.272805] env[62476]: DEBUG oslo_concurrency.lockutils [None req-945eaaaf-c885-4c0a-b27c-c7e394a45613 tempest-ServerExternalEventsTest-1041193458 tempest-ServerExternalEventsTest-1041193458-project-member] Lock "e41d1a8c-ad7e-4151-9745-04318b007dfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.238s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.304930] env[62476]: DEBUG nova.compute.manager [None req-e47f7d9b-0846-4022-b0d4-f184daa30b13 tempest-ServerActionsTestOtherA-421018234 tempest-ServerActionsTestOtherA-421018234-project-member] [instance: 92e0c9fd-582c-4118-b7e0-0fb822b1c38e] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1069.332388] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e47f7d9b-0846-4022-b0d4-f184daa30b13 tempest-ServerActionsTestOtherA-421018234 tempest-ServerActionsTestOtherA-421018234-project-member] Lock "92e0c9fd-582c-4118-b7e0-0fb822b1c38e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.672s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.356241] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1069.455753] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.456027] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.458151] env[62476]: INFO nova.compute.claims [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1069.939063] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956f829e-f86d-4c42-b74a-dd11cf9f2eae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.947815] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b2da92-92bf-40ee-9ffb-a0a94b7e9f0a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.980267] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a23bb3e-ca51-4018-a8c4-f1c4e280a170 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.989385] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46894faa-fbcf-4009-a9e9-215b7c31b844 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.005071] env[62476]: DEBUG nova.compute.provider_tree [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.028475] env[62476]: DEBUG nova.scheduler.client.report [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1070.050539] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.594s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.051592] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1070.108773] env[62476]: DEBUG nova.compute.utils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1070.110597] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1070.110597] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1070.128110] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1070.222166] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1070.242578] env[62476]: DEBUG nova.policy [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73b14417754f45e7ae53f4765e3e454f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8110e3a440894c78929977d03690e4c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1070.257432] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1070.257681] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1070.257835] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1070.258088] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1070.258338] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1070.258571] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1070.258711] env[62476]: DEBUG 
nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1070.258868] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1070.259047] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1070.259215] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1070.259388] env[62476]: DEBUG nova.virt.hardware [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1070.260715] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66569dcb-7e1c-412a-a775-c6efd25dfb0c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.270953] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a585cc-1598-4557-bca5-0d218810b29d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.328118] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Successfully created port: 2c341e3b-58f1-48b0-8bbc-80c94d5fe721 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1072.503736] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "eca46087-33a7-4e9d-a7ce-6094886704a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.393842] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Successfully updated port: 2c341e3b-58f1-48b0-8bbc-80c94d5fe721 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1073.405223] env[62476]: DEBUG 
oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.405427] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquired lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.406382] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1073.510812] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1073.860951] env[62476]: DEBUG nova.compute.manager [req-f66a8735-faec-4d89-96c1-62dc6514639b req-22cb4253-f836-4c32-9da3-9694d5205b10 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Received event network-vif-plugged-2c341e3b-58f1-48b0-8bbc-80c94d5fe721 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1073.861299] env[62476]: DEBUG oslo_concurrency.lockutils [req-f66a8735-faec-4d89-96c1-62dc6514639b req-22cb4253-f836-4c32-9da3-9694d5205b10 service nova] Acquiring lock "eca46087-33a7-4e9d-a7ce-6094886704a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.861387] env[62476]: DEBUG oslo_concurrency.lockutils [req-f66a8735-faec-4d89-96c1-62dc6514639b req-22cb4253-f836-4c32-9da3-9694d5205b10 service nova] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.861648] env[62476]: DEBUG oslo_concurrency.lockutils [req-f66a8735-faec-4d89-96c1-62dc6514639b req-22cb4253-f836-4c32-9da3-9694d5205b10 service nova] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.861709] env[62476]: DEBUG nova.compute.manager [req-f66a8735-faec-4d89-96c1-62dc6514639b req-22cb4253-f836-4c32-9da3-9694d5205b10 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] No waiting events found dispatching network-vif-plugged-2c341e3b-58f1-48b0-8bbc-80c94d5fe721 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1073.861890] env[62476]: WARNING nova.compute.manager [req-f66a8735-faec-4d89-96c1-62dc6514639b 
req-22cb4253-f836-4c32-9da3-9694d5205b10 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Received unexpected event network-vif-plugged-2c341e3b-58f1-48b0-8bbc-80c94d5fe721 for instance with vm_state building and task_state deleting. [ 1073.924143] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Updating instance_info_cache with network_info: [{"id": "2c341e3b-58f1-48b0-8bbc-80c94d5fe721", "address": "fa:16:3e:43:75:4b", "network": {"id": "4b66ccce-f5b8-4186-91e8-194f9dfd2cc6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1018339814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8110e3a440894c78929977d03690e4c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c341e3b-58", "ovs_interfaceid": "2c341e3b-58f1-48b0-8bbc-80c94d5fe721", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.947270] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Releasing lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.947596] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance network_info: |[{"id": "2c341e3b-58f1-48b0-8bbc-80c94d5fe721", "address": "fa:16:3e:43:75:4b", "network": {"id": "4b66ccce-f5b8-4186-91e8-194f9dfd2cc6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1018339814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8110e3a440894c78929977d03690e4c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c341e3b-58", "ovs_interfaceid": "2c341e3b-58f1-48b0-8bbc-80c94d5fe721", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1073.948039] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:75:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c341e3b-58f1-48b0-8bbc-80c94d5fe721', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1073.960398] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Creating folder: Project (8110e3a440894c78929977d03690e4c4). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1073.965768] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe811afe-524e-40d8-955e-c3c45ca0774e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.977030] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Created folder: Project (8110e3a440894c78929977d03690e4c4) in parent group-v849485. [ 1073.977030] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Creating folder: Instances. Parent ref: group-v849540. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1073.977030] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e3b4ba8-1904-4e7e-81ab-47b2f87e73e3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.991204] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Created folder: Instances in parent group-v849540. [ 1073.991204] env[62476]: DEBUG oslo.service.loopingcall [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1073.991204] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1073.991204] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a286ffe-2f38-41fc-99ec-75f780c93b56 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1074.024022] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1074.024022] env[62476]: value = "task-4319088"
[ 1074.024022] env[62476]: _type = "Task"
[ 1074.024022] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1074.031720] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319088, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1074.533158] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319088, 'name': CreateVM_Task, 'duration_secs': 0.346661} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1074.533158] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1074.534361] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1074.534644] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1074.535065] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1074.535436] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20023a8c-2968-4655-9838-c2cb744bc0a8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1074.541295] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Waiting for the task: (returnval){
[ 1074.541295] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5260006f-492f-299b-8a51-9857280fe21e"
[ 1074.541295] env[62476]: _type = "Task"
[ 1074.541295] env[62476]: } to complete.
[ 1074.556991] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5260006f-492f-299b-8a51-9857280fe21e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.563493] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "4954bf5d-20db-4787-91b5-a990ed30cdf3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.563731] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.058630] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.058898] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1075.059141] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.049687] env[62476]: DEBUG nova.compute.manager [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Received event network-changed-2c341e3b-58f1-48b0-8bbc-80c94d5fe721 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1076.049937] env[62476]: DEBUG nova.compute.manager [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Refreshing instance network info cache due to event network-changed-2c341e3b-58f1-48b0-8bbc-80c94d5fe721.
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1076.050464] env[62476]: DEBUG oslo_concurrency.lockutils [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] Acquiring lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.050464] env[62476]: DEBUG oslo_concurrency.lockutils [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] Acquired lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.050464] env[62476]: DEBUG nova.network.neutron [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Refreshing network info cache for port 2c341e3b-58f1-48b0-8bbc-80c94d5fe721 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1076.856653] env[62476]: DEBUG nova.network.neutron [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Updated VIF entry in instance network info cache for port 2c341e3b-58f1-48b0-8bbc-80c94d5fe721. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1076.858721] env[62476]: DEBUG nova.network.neutron [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Updating instance_info_cache with network_info: [{"id": "2c341e3b-58f1-48b0-8bbc-80c94d5fe721", "address": "fa:16:3e:43:75:4b", "network": {"id": "4b66ccce-f5b8-4186-91e8-194f9dfd2cc6", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1018339814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8110e3a440894c78929977d03690e4c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c341e3b-58", "ovs_interfaceid": "2c341e3b-58f1-48b0-8bbc-80c94d5fe721", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.881377] env[62476]: DEBUG oslo_concurrency.lockutils [req-939b238a-3977-46e2-b44d-2d5b1c9e58c2 req-8aa4ec82-1e32-43d1-a369-d31ae6ca4e85 service nova] Releasing lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.222405] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d141ef50-6c5c-4c75-a49f-f3f29f0300be tempest-ServerRescueTestJSONUnderV235-907344075 
tempest-ServerRescueTestJSONUnderV235-907344075-project-member] Acquiring lock "51f4fbdd-836c-4645-8e63-af9827234d7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.222710] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d141ef50-6c5c-4c75-a49f-f3f29f0300be tempest-ServerRescueTestJSONUnderV235-907344075 tempest-ServerRescueTestJSONUnderV235-907344075-project-member] Lock "51f4fbdd-836c-4645-8e63-af9827234d7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.000916] env[62476]: DEBUG oslo_concurrency.lockutils [None req-28c0894c-b7dd-4d37-b3c4-e6bc32b4f71a tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] Acquiring lock "da13b71e-709e-4b89-82d7-d4f30c319f9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.000916] env[62476]: DEBUG oslo_concurrency.lockutils [None req-28c0894c-b7dd-4d37-b3c4-e6bc32b4f71a tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] Lock "da13b71e-709e-4b89-82d7-d4f30c319f9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.094245] env[62476]: DEBUG oslo_concurrency.lockutils [None req-18b8d733-109e-4c79-887f-58bce9943f88 tempest-ServerTagsTestJSON-1565539301 tempest-ServerTagsTestJSON-1565539301-project-member] Acquiring lock "afb77e4b-c7d1-4743-b9ca-1e729371a334" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.094245] env[62476]: DEBUG oslo_concurrency.lockutils [None req-18b8d733-109e-4c79-887f-58bce9943f88 tempest-ServerTagsTestJSON-1565539301 tempest-ServerTagsTestJSON-1565539301-project-member] Lock "afb77e4b-c7d1-4743-b9ca-1e729371a334" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.027023] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.027421] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
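The "Running periodic task ComputeManager._reclaim_queued_deletes" record and the "skipping..." record that follows it come from oslo.service's periodic task machinery. A minimal sketch of that pattern (assuming oslo.service and oslo.config are installed; the class and task bodies here are placeholders, not Nova's code):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class ComputePeriodics(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            # A task can run and still decide there is nothing to do, which is
            # what the "reclaim_instance_interval <= 0, skipping" record shows.
            reclaim_interval = 0  # placeholder for CONF.reclaim_instance_interval
            if reclaim_interval <= 0:
                return

        @periodic_task.periodic_task(spacing=60)
        def update_available_resource(self, context):
            pass  # audit hypervisor resources, as the resource tracker does below

A timer in the service then calls run_periodic_tasks(context) on the manager, and each invocation emits one "Running periodic task ..." DEBUG record like those in this log.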
[ 1111.027977] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.046311] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.046739] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.046815] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.047133] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1111.050160] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2eb3be-6d29-478b-ba91-e01663ec029b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.065747] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fac8081-7409-403c-949b-87a8e95f1d59 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.079882] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0e430a-6a46-4d86-8ed2-c112113836e4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.087796] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935bd30c-f2b5-48c1-843e-1c4b97ebe16f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.119881] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180672MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1111.120504] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [
1111.120748] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.214026] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance d7ec9b10-5975-4148-9931-3e7b0999b373 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214026] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214026] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214026] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214285] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214285] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214285] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214399] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214513] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.214705] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1111.230084] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4be38fcc-5fa9-43b3-ab33-544812082b2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.242646] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e1c8d6f6-b179-4e47-ac13-9abeb84e5a53 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.257970] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.269211] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.282558] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.293616] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f1e79622-36ec-4efa-9b19-d5aeb1b9d57b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.307204] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a18ae56f-62d1-407e-bc7e-47907857e6b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.322678] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.335325] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 51f4fbdd-836c-4645-8e63-af9827234d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.348041] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance da13b71e-709e-4b89-82d7-d4f30c319f9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.359918] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance afb77e4b-c7d1-4743-b9ca-1e729371a334 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1111.360181] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1111.360328] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1111.642949] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467a3d7a-304d-45de-b403-f3314f1d786c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.650929] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e20a29-0c82-459e-a550-f5b7a7c99e85 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.680100] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3815abc8-eb65-43e3-9958-c000b599faeb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.687859] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2216a87-dc00-4875-803b-47ca4a3fa177 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.695638] env[62476]: DEBUG oslo_concurrency.lockutils [None req-22b8c072-ab47-4bbc-bbde-9e1dc727e74c tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Acquiring lock "e23febc5-e647-4640-afbd-bb28c9483283" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.695920] env[62476]: DEBUG oslo_concurrency.lockutils [None req-22b8c072-ab47-4bbc-bbde-9e1dc727e74c tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Lock "e23febc5-e647-4640-afbd-bb28c9483283" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.704971] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.715039] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
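The inventory data reported above determines how much of each resource class Placement will schedule: capacity is (total - reserved) * allocation_ratio. Worked out for the values in this log (a short illustrative sketch; the formula matches how Placement treats these fields):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: schedulable capacity = %g' % (rc, capacity))
    # VCPU: (48 - 0) * 4.0 = 192, which is why the 10 allocated vCPUs in the
    # audit above leave plenty of headroom on 48 physical vCPUs.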
[ 1111.728284] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1111.728487] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.608s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.729250] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.729558] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1112.729558] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1112.749943] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750116] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750252] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750375] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750496] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750614] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building.
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750746] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750865] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.750981] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.751110] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1112.751230] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1113.026483] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1113.026710] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1113.026872] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1114.388344] env[62476]: WARNING oslo_vmware.rw_handles [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1114.388344] env[62476]: ERROR oslo_vmware.rw_handles
[ 1114.388938] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1114.391395] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1114.391710] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Copying Virtual Disk [datastore1] vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/d3bfd2da-43e3-4d0d-bc9c-775256285a7b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1114.392061] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-532f7c20-73df-4d8b-9b5c-39b704a7f32f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1114.400896] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){
[ 1114.400896] env[62476]: value = "task-4319089"
[ 1114.400896] env[62476]: _type = "Task"
[ 1114.400896] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1114.411605] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': task-4319089, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.911420] env[62476]: DEBUG oslo_vmware.exceptions [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Fault InvalidArgument not matched.
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1114.911736] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1114.912347] env[62476]: ERROR nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1114.912347] env[62476]: Faults: ['InvalidArgument']
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Traceback (most recent call last):
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     yield resources
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     self.driver.spawn(context, instance, image_meta,
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     self._fetch_image_if_missing(context, vi)
[ 1114.912347] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     image_cache(vi, tmp_image_ds_loc)
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     vm_util.copy_virtual_disk(
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     session._wait_for_task(vmdk_copy_task)
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     return self.wait_for_task(task_ref)
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     return evt.wait()
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     result = hub.switch()
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1114.912747] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     return self.greenlet.switch()
[ 1114.913099] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1114.913099] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     self.f(*self.args, **self.kw)
[ 1114.913099] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1114.913099] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]     raise exceptions.translate_fault(task_info.error)
[ 1114.913099] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1114.913099] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Faults: ['InvalidArgument']
[ 1114.913099] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373]
[ 1114.913099] env[62476]: INFO nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Terminating instance
[ 1114.914325] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.914539] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.915193] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
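The InvalidArgument failure above surfaces to callers as oslo_vmware.exceptions.VimFaultException, whose fault_list carries the fault names that _poll_task extracted from the task error. A hedged sketch of consuming it (assumes oslo.vmware is installed; spawn is a hypothetical callable standing in for the driver call):

    from oslo_vmware import exceptions as vexc

    def spawn_with_fault_logging(spawn):
        try:
            spawn()
        except vexc.VimFaultException as e:
            # fault_list holds the raw VIM fault names, e.g. ['InvalidArgument'].
            if 'InvalidArgument' in (e.fault_list or []):
                print('spawn failed with InvalidArgument: %s' % e)
            raise  # let the compute manager abort the claim, as this log shows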
[ 1114.915389] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1114.915618] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40edcf77-8d39-48dc-ae06-c18ca1c598c5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.918126] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f85ca9-c734-48d7-928f-c9a2a0c5d01f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.927120] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1114.927120] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6498963-33e0-44b8-be07-524f1e68369b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.928694] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.928919] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1114.929938] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c229e96-b1bb-4bdf-b471-b0229a558146 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1114.935301] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){
[ 1114.935301] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52d7f2d6-243b-8aa0-b3ba-3762e836ea04"
[ 1114.935301] env[62476]: _type = "Task"
[ 1114.935301] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1114.943876] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52d7f2d6-243b-8aa0-b3ba-3762e836ea04, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.020660] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1115.020899] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1115.021098] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Deleting the datastore file [datastore1] d7ec9b10-5975-4148-9931-3e7b0999b373 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.021400] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7bab4624-bf02-431a-9734-7290ba6017da {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.023510] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.026546] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1115.028976] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){
[ 1115.028976] env[62476]: value = "task-4319091"
[ 1115.028976] env[62476]: _type = "Task"
[ 1115.028976] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1115.037609] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': task-4319091, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.445727] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1115.446084] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating directory with path [datastore1] vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1115.446369] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04f50d28-410c-4a9e-8bef-85374bc92445 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.460852] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Created directory with path [datastore1] vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1115.461132] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Fetch image to [datastore1] vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1115.461383] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1115.462268] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8491fc04-78ee-4b36-a8c6-8c82b9bca772 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.470642] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa4e0a8-cb37-4534-b4fa-56b77d5513e0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.481580] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ca7c26-7432-4ad8-9593-c45e35bc1169 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.515994] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b44063b7-d471-4778-ba90-c1afc85655c5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.523473] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-de5e8b8b-7545-4222-878f-b488a7ee120f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.539270] env[62476]: DEBUG oslo_vmware.api [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': task-4319091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09185} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.539638] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1115.539901] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1115.540200] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1115.540414] env[62476]: INFO nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Took 0.63 seconds to destroy the instance on the hypervisor. 
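The Acquiring/Acquired/Releasing records that bracket work throughout this log (the image-cache paths, "compute_resources", the per-instance build locks) are oslo.concurrency's named locks. A minimal sketch of the pattern (assuming oslo.concurrency is installed; the lock name mirrors the log and the body is a placeholder):

    from oslo_concurrency import lockutils

    CACHE_LOCK = "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7"

    def fetch_image_if_missing():
        # lockutils.lock() is a context manager: it logs "Acquiring"/"Acquired"
        # on entry and "Releasing" on exit, the record triplets seen above.
        with lockutils.lock(CACHE_LOCK):
            pass  # search the datastore and download the image if absent

Keying the lock on the datastore path is what lets concurrent builds of different images proceed while serializing work on the same cached image.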
[ 1115.542612] env[62476]: DEBUG nova.compute.claims [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1115.543294] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.543294] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.550021] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1115.615912] env[62476]: DEBUG oslo_vmware.rw_handles [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1115.678253] env[62476]: DEBUG oslo_vmware.rw_handles [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1115.678961] env[62476]: DEBUG oslo_vmware.rw_handles [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
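The rw_handles records above amount to streaming the 21318656-byte sparse vmdk to the datastore folder URL over HTTP and closing the handle once the image iterator is drained. For illustration only (this uses requests rather than oslo.vmware's rw_handles; url, image_iter and session_cookie are assumed inputs):

    import requests

    def upload_vmdk(url, image_iter, session_cookie):
        # requests streams a generator body, so the ~21 MB file is never
        # buffered whole in memory; the vCenter session cookie authenticates us.
        resp = requests.put(url, data=image_iter,
                            headers={'Cookie': session_cookie}, verify=False)
        resp.raise_for_status()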
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1115.990706] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2feeaba-eada-4eae-aceb-8f1e0837d89b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.000077] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da597ac1-c805-44bf-a6b0-1f9eb21d1838 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.037246] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.039115] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9af913-6fe6-4add-9530-fdae17fc5a7a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.055846] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed73cab3-e941-460c-84ca-f44e5495db17 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.081899] env[62476]: DEBUG nova.compute.provider_tree [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.092037] env[62476]: DEBUG nova.scheduler.client.report [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1116.109967] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.567s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.110857] env[62476]: ERROR nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1116.110857] env[62476]: Faults: ['InvalidArgument'] [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: 
d7ec9b10-5975-4148-9931-3e7b0999b373] Traceback (most recent call last): [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] self.driver.spawn(context, instance, image_meta, [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] self._fetch_image_if_missing(context, vi) [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] image_cache(vi, tmp_image_ds_loc) [ 1116.110857] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] vm_util.copy_virtual_disk( [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] session._wait_for_task(vmdk_copy_task) [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] return self.wait_for_task(task_ref) [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] return evt.wait() [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] result = hub.switch() [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] return self.greenlet.switch() [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1116.111255] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] self.f(*self.args, **self.kw) [ 1116.111567] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1116.111567] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] raise exceptions.translate_fault(task_info.error) [ 1116.111567] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1116.111567] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Faults: ['InvalidArgument'] [ 1116.111567] env[62476]: ERROR nova.compute.manager [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] [ 1116.111918] env[62476]: DEBUG nova.compute.utils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1116.114069] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Build of instance d7ec9b10-5975-4148-9931-3e7b0999b373 was re-scheduled: A specified parameter was not correct: fileType [ 1116.114069] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1116.114669] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1116.114969] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1116.115297] env[62476]: DEBUG nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1116.115593] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1116.870568] env[62476]: DEBUG nova.network.neutron [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.884558] env[62476]: INFO nova.compute.manager [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Took 0.77 seconds to deallocate network for instance. [ 1117.044901] env[62476]: INFO nova.scheduler.client.report [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Deleted allocations for instance d7ec9b10-5975-4148-9931-3e7b0999b373 [ 1117.080699] env[62476]: DEBUG oslo_concurrency.lockutils [None req-53bf7cdd-5988-4c0a-a24d-d4a9433403b9 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 471.322s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.082144] env[62476]: DEBUG oslo_concurrency.lockutils [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 70.902s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.085127] env[62476]: DEBUG oslo_concurrency.lockutils [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "d7ec9b10-5975-4148-9931-3e7b0999b373-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.085127] env[62476]: DEBUG oslo_concurrency.lockutils [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.085127] env[62476]: DEBUG oslo_concurrency.lockutils [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.087773] env[62476]: INFO nova.compute.manager [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Terminating instance [ 1117.089049] env[62476]: DEBUG nova.compute.manager [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1117.089249] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1117.089502] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67097d9a-57d1-4ad8-b339-8b520915350b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.104033] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f45915-1bc5-4156-9676-4b59627a10a8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.117908] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: 3295b0cb-15d5-4008-bc76-95b69f2f40a3] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.141696] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7ec9b10-5975-4148-9931-3e7b0999b373 could not be found. [ 1117.142766] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1117.142766] env[62476]: INFO nova.compute.manager [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Took 0.05 seconds to destroy the instance on the hypervisor. 
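The terminate path above succeeds even though the failed build already destroyed the VM: the SearchIndex.FindAllByUuid lookup comes back empty, the driver raises InstanceNotFound, and vmops logs the WARNING and proceeds as if the destroy had completed, which is why this pass takes 0.05 seconds against 0.63 seconds for the real teardown earlier. A minimal sketch of that idempotent destroy follows; vm_lookup and power_off_and_delete are illustrative stand-ins, not the driver's real helper names.

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(vm_lookup, power_off_and_delete, instance_uuid, log):
    # vm_lookup models the SearchIndex.FindAllByUuid call logged above;
    # power_off_and_delete models the teardown of a VM that still exists.
    try:
        vm_ref = vm_lookup(instance_uuid)
    except InstanceNotFound as exc:
        # Backend object already gone: warn and treat the destroy as done,
        # so the caller can still release networks and placement allocations.
        log('Instance does not exist on backend: %s' % exc)
        return
    power_off_and_delete(vm_ref)

Treating InstanceNotFound as success is what lets the deallocation entries that follow run unconditionally after either kind of destroy.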
[ 1117.142766] env[62476]: DEBUG oslo.service.loopingcall [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1117.142766] env[62476]: DEBUG nova.compute.manager [-] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1117.142766] env[62476]: DEBUG nova.network.neutron [-] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1117.155251] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: 3295b0cb-15d5-4008-bc76-95b69f2f40a3] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.186390] env[62476]: DEBUG nova.network.neutron [-] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.189676] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "3295b0cb-15d5-4008-bc76-95b69f2f40a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.959s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.212677] env[62476]: INFO nova.compute.manager [-] [instance: d7ec9b10-5975-4148-9931-3e7b0999b373] Took 0.07 seconds to deallocate network for instance. [ 1117.219309] env[62476]: DEBUG nova.compute.manager [None req-3b7952cc-98c8-414e-8011-f1089c43829d tempest-ServerPasswordTestJSON-1745514762 tempest-ServerPasswordTestJSON-1745514762-project-member] [instance: 7aab03db-43b4-4884-bc20-0a29058ea2ed] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.254879] env[62476]: DEBUG nova.compute.manager [None req-3b7952cc-98c8-414e-8011-f1089c43829d tempest-ServerPasswordTestJSON-1745514762 tempest-ServerPasswordTestJSON-1745514762-project-member] [instance: 7aab03db-43b4-4884-bc20-0a29058ea2ed] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.300108] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3b7952cc-98c8-414e-8011-f1089c43829d tempest-ServerPasswordTestJSON-1745514762 tempest-ServerPasswordTestJSON-1745514762-project-member] Lock "7aab03db-43b4-4884-bc20-0a29058ea2ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.227s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.317950] env[62476]: DEBUG nova.compute.manager [None req-f6122ed3-43ce-497f-9465-9e2f2a90d52e tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] [instance: ade4daab-4b02-4664-b745-a0c799d8415d] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.398522] env[62476]: DEBUG oslo_concurrency.lockutils [None req-36313677-dced-4b09-ace9-93a161385cb1 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "d7ec9b10-5975-4148-9931-3e7b0999b373" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.316s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.399662] env[62476]: DEBUG nova.compute.manager [None req-f6122ed3-43ce-497f-9465-9e2f2a90d52e tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] [instance: ade4daab-4b02-4664-b745-a0c799d8415d] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.436061] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f6122ed3-43ce-497f-9465-9e2f2a90d52e tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] Lock "ade4daab-4b02-4664-b745-a0c799d8415d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.886s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.450805] env[62476]: DEBUG nova.compute.manager [None req-59b74bcb-bd32-4100-84b7-3950a1100cff tempest-ServersNegativeTestJSON-2144930018 tempest-ServersNegativeTestJSON-2144930018-project-member] [instance: cd2482db-1c9e-4b1a-bb79-b7250cb863a8] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.489732] env[62476]: DEBUG nova.compute.manager [None req-59b74bcb-bd32-4100-84b7-3950a1100cff tempest-ServersNegativeTestJSON-2144930018 tempest-ServersNegativeTestJSON-2144930018-project-member] [instance: cd2482db-1c9e-4b1a-bb79-b7250cb863a8] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.511255] env[62476]: DEBUG oslo_concurrency.lockutils [None req-59b74bcb-bd32-4100-84b7-3950a1100cff tempest-ServersNegativeTestJSON-2144930018 tempest-ServersNegativeTestJSON-2144930018-project-member] Lock "cd2482db-1c9e-4b1a-bb79-b7250cb863a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.510s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.525258] env[62476]: DEBUG nova.compute.manager [None req-cd41f60f-7b50-4fad-bdf0-cdc7e1389792 tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] [instance: 2c553a0b-1f8b-42aa-8b64-e22bc3cac45e] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.553343] env[62476]: DEBUG nova.compute.manager [None req-cd41f60f-7b50-4fad-bdf0-cdc7e1389792 tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] [instance: 2c553a0b-1f8b-42aa-8b64-e22bc3cac45e] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.583199] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cd41f60f-7b50-4fad-bdf0-cdc7e1389792 tempest-VolumesAdminNegativeTest-1065309950 tempest-VolumesAdminNegativeTest-1065309950-project-member] Lock "2c553a0b-1f8b-42aa-8b64-e22bc3cac45e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.742s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.596172] env[62476]: DEBUG nova.compute.manager [None req-ab8492c9-a05b-4c8c-aa92-779d2fd554a1 tempest-InstanceActionsTestJSON-845501273 tempest-InstanceActionsTestJSON-845501273-project-member] [instance: 02b24610-323b-47b0-9c3e-f397cb48835e] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.624823] env[62476]: DEBUG nova.compute.manager [None req-ab8492c9-a05b-4c8c-aa92-779d2fd554a1 tempest-InstanceActionsTestJSON-845501273 tempest-InstanceActionsTestJSON-845501273-project-member] [instance: 02b24610-323b-47b0-9c3e-f397cb48835e] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.654444] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab8492c9-a05b-4c8c-aa92-779d2fd554a1 tempest-InstanceActionsTestJSON-845501273 tempest-InstanceActionsTestJSON-845501273-project-member] Lock "02b24610-323b-47b0-9c3e-f397cb48835e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.815s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.672970] env[62476]: DEBUG nova.compute.manager [None req-6ccdac5e-dac4-4774-91df-45313dc42124 tempest-ServerGroupTestJSON-560967850 tempest-ServerGroupTestJSON-560967850-project-member] [instance: 4439f302-8fa4-452a-97d8-4d6c1fef36d0] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.706431] env[62476]: DEBUG nova.compute.manager [None req-6ccdac5e-dac4-4774-91df-45313dc42124 tempest-ServerGroupTestJSON-560967850 tempest-ServerGroupTestJSON-560967850-project-member] [instance: 4439f302-8fa4-452a-97d8-4d6c1fef36d0] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.736587] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6ccdac5e-dac4-4774-91df-45313dc42124 tempest-ServerGroupTestJSON-560967850 tempest-ServerGroupTestJSON-560967850-project-member] Lock "4439f302-8fa4-452a-97d8-4d6c1fef36d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.483s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.747016] env[62476]: DEBUG nova.compute.manager [None req-acfdfcd5-59f8-4392-b9e2-8d61199ba69d tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] [instance: 1241b06a-696d-4f96-961e-95129b1ba674] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.777786] env[62476]: DEBUG nova.compute.manager [None req-acfdfcd5-59f8-4392-b9e2-8d61199ba69d tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] [instance: 1241b06a-696d-4f96-961e-95129b1ba674] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.806734] env[62476]: DEBUG oslo_concurrency.lockutils [None req-acfdfcd5-59f8-4392-b9e2-8d61199ba69d tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Lock "1241b06a-696d-4f96-961e-95129b1ba674" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.063s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.817704] env[62476]: DEBUG nova.compute.manager [None req-5e11bcea-2717-4d76-82e2-bfcbcdcca495 tempest-ServerShowV257Test-510154599 tempest-ServerShowV257Test-510154599-project-member] [instance: 4be38fcc-5fa9-43b3-ab33-544812082b2f] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.851425] env[62476]: DEBUG nova.compute.manager [None req-5e11bcea-2717-4d76-82e2-bfcbcdcca495 tempest-ServerShowV257Test-510154599 tempest-ServerShowV257Test-510154599-project-member] [instance: 4be38fcc-5fa9-43b3-ab33-544812082b2f] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.878916] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5e11bcea-2717-4d76-82e2-bfcbcdcca495 tempest-ServerShowV257Test-510154599 tempest-ServerShowV257Test-510154599-project-member] Lock "4be38fcc-5fa9-43b3-ab33-544812082b2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.317s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.890438] env[62476]: DEBUG nova.compute.manager [None req-6e86e42d-d2c3-452f-a762-ec5afd76acaa tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: e1c8d6f6-b179-4e47-ac13-9abeb84e5a53] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1117.930975] env[62476]: DEBUG nova.compute.manager [None req-6e86e42d-d2c3-452f-a762-ec5afd76acaa tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: e1c8d6f6-b179-4e47-ac13-9abeb84e5a53] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1117.960380] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6e86e42d-d2c3-452f-a762-ec5afd76acaa tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "e1c8d6f6-b179-4e47-ac13-9abeb84e5a53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.147s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.972021] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1118.039109] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.039432] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.041223] env[62476]: INFO nova.compute.claims [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.493907] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61037326-8c02-4585-989e-e286d3ffd359 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.504984] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa51d4a-93f8-4003-874b-5ca41f8a403a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.536478] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac58844-e1ad-4730-b0f4-a9d8b91a2afb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.545811] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f97edf0-7665-41ad-ab6b-abd9a51bad1d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.559418] env[62476]: DEBUG nova.compute.provider_tree [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.571493] env[62476]: DEBUG nova.scheduler.client.report [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1118.590429] env[62476]: DEBUG oslo_concurrency.lockutils 
[None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.551s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.590978] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1118.652063] env[62476]: DEBUG nova.compute.utils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1118.653534] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1118.654441] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1118.671314] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1118.760165] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1118.795441] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1118.796051] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1118.796051] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1118.798722] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1118.798940] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1118.799104] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1118.799471] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1118.799471] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1118.799648] env[62476]: DEBUG 
nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1118.799817] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1118.802481] env[62476]: DEBUG nova.virt.hardware [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1118.802481] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e6890a-d1fe-4b9c-8170-b48d3c0ce8a6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.805449] env[62476]: DEBUG nova.policy [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12fee39f5d26449abe8b48cda6ee93ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5fa89a9315b4e52b46ca13dc8223fa9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1118.814422] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8bb727-8bbc-4275-9e2a-c0c877e4e637 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.328265] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a9efd60d-70ed-4621-bbff-3e849241392b tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "a5ad39b8-0d35-4a31-9279-bcce71363d95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.328265] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a9efd60d-70ed-4621-bbff-3e849241392b tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "a5ad39b8-0d35-4a31-9279-bcce71363d95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.476032] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Successfully created port: edb0c571-b584-4142-b2fd-63d7397c788c 
{{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1120.740215] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "7211a8c4-5430-4b0c-86e7-8101ed71463e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.741207] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "7211a8c4-5430-4b0c-86e7-8101ed71463e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.880138] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Successfully updated port: edb0c571-b584-4142-b2fd-63d7397c788c {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1120.900955] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "refresh_cache-3cdef023-ce78-4c3b-8476-5508c18204c2" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.901251] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquired lock "refresh_cache-3cdef023-ce78-4c3b-8476-5508c18204c2" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.901447] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1120.980678] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1121.081881] env[62476]: DEBUG nova.compute.manager [req-a7d0758e-95eb-4734-9c0d-ff6abb7c61fd req-b260b192-733b-4fe8-92a2-be382e1a221a service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Received event network-vif-plugged-edb0c571-b584-4142-b2fd-63d7397c788c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1121.082161] env[62476]: DEBUG oslo_concurrency.lockutils [req-a7d0758e-95eb-4734-9c0d-ff6abb7c61fd req-b260b192-733b-4fe8-92a2-be382e1a221a service nova] Acquiring lock "3cdef023-ce78-4c3b-8476-5508c18204c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.082374] env[62476]: DEBUG oslo_concurrency.lockutils [req-a7d0758e-95eb-4734-9c0d-ff6abb7c61fd req-b260b192-733b-4fe8-92a2-be382e1a221a service nova] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.082540] env[62476]: DEBUG oslo_concurrency.lockutils [req-a7d0758e-95eb-4734-9c0d-ff6abb7c61fd req-b260b192-733b-4fe8-92a2-be382e1a221a service nova] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.082707] env[62476]: DEBUG nova.compute.manager [req-a7d0758e-95eb-4734-9c0d-ff6abb7c61fd req-b260b192-733b-4fe8-92a2-be382e1a221a service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] No waiting events found dispatching network-vif-plugged-edb0c571-b584-4142-b2fd-63d7397c788c {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1121.082872] env[62476]: WARNING nova.compute.manager [req-a7d0758e-95eb-4734-9c0d-ff6abb7c61fd req-b260b192-733b-4fe8-92a2-be382e1a221a service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Received unexpected event network-vif-plugged-edb0c571-b584-4142-b2fd-63d7397c788c for instance with vm_state building and task_state spawning. 
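The req-a7d0758e.../req-b260b192... entries show Neutron's external-event path into Nova: the network-vif-plugged notification for port edb0c571-b584-4142-b2fd-63d7397c788c arrives, the per-instance "-events" lock is taken, no registered waiter is found (this spawn did not block on plug), and the event is logged as unexpected rather than treated as an error. The following is a toy version of that expected-event bookkeeping, using threading primitives instead of Nova's eventlet-based implementation; the class and method names are illustrative only.

import threading

class ExpectedEvents:
    """Registry of events a spawn expects, keyed by (instance, event name)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}

    def prepare(self, instance_uuid, event_name):
        # Spawn calls this before triggering the work that emits the event,
        # then blocks on the returned Event with a timeout.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop(self, instance_uuid, event_name, log):
        # The external-event handler calls this when a notification arrives.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            log('Received unexpected event %s for instance %s'
                % (event_name, instance_uuid))
        else:
            waiter.set()  # wakes the spawn thread blocked on prepare()'s Event

Had the spawn registered a waiter for network-vif-plugged-edb0c571-b584-4142-b2fd-63d7397c788c on instance 3cdef023-ce78-4c3b-8476-5508c18204c2 and blocked on it, the same notification would have released it instead of producing the WARNING above.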
[ 1121.251649] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Updating instance_info_cache with network_info: [{"id": "edb0c571-b584-4142-b2fd-63d7397c788c", "address": "fa:16:3e:68:8e:f9", "network": {"id": "d5b8b26d-7c38-44fe-b5d3-55488fdd7530", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1259779373-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fa89a9315b4e52b46ca13dc8223fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f16a5584-aed0-4df4-820b-5e7f15977265", "external-id": "cl2-zone-495", "segmentation_id": 495, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedb0c571-b5", "ovs_interfaceid": "edb0c571-b584-4142-b2fd-63d7397c788c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.280768] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Releasing lock "refresh_cache-3cdef023-ce78-4c3b-8476-5508c18204c2" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.280768] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Instance network_info: |[{"id": "edb0c571-b584-4142-b2fd-63d7397c788c", "address": "fa:16:3e:68:8e:f9", "network": {"id": "d5b8b26d-7c38-44fe-b5d3-55488fdd7530", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1259779373-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fa89a9315b4e52b46ca13dc8223fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f16a5584-aed0-4df4-820b-5e7f15977265", "external-id": "cl2-zone-495", "segmentation_id": 495, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedb0c571-b5", "ovs_interfaceid": "edb0c571-b584-4142-b2fd-63d7397c788c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1121.281205] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:8e:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f16a5584-aed0-4df4-820b-5e7f15977265', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'edb0c571-b584-4142-b2fd-63d7397c788c', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1121.292308] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Creating folder: Project (b5fa89a9315b4e52b46ca13dc8223fa9). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1121.293790] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77ab23df-9859-4fcd-b6fc-f833b7b4a1a1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.305911] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Created folder: Project (b5fa89a9315b4e52b46ca13dc8223fa9) in parent group-v849485. [ 1121.306145] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Creating folder: Instances. Parent ref: group-v849543. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1121.306408] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e194500-14a3-4c96-9acc-0bc9f0d1a6c3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.318084] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Created folder: Instances in parent group-v849543. [ 1121.318202] env[62476]: DEBUG oslo.service.loopingcall [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1121.318424] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1121.318646] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffd1eaa5-3f67-485f-b507-27ec983b2760 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.347067] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1121.347067] env[62476]: value = "task-4319094" [ 1121.347067] env[62476]: _type = "Task" [ 1121.347067] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.356432] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319094, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.858327] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319094, 'name': CreateVM_Task, 'duration_secs': 0.387311} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.858872] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1121.859770] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.860212] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.860628] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1121.863086] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6f66a4-cdb6-40ec-aa64-61aa84053a30 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.868898] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Waiting for the task: (returnval){ [ 1121.868898] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5261aaa4-c1e4-23ea-3980-aeb520daaf13" [ 1121.868898] env[62476]: _type = "Task" [ 1121.868898] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.884822] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5261aaa4-c1e4-23ea-3980-aeb520daaf13, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.383198] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.384298] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1122.385112] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.140348] env[62476]: DEBUG nova.compute.manager [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Received event network-changed-edb0c571-b584-4142-b2fd-63d7397c788c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1123.140685] env[62476]: DEBUG nova.compute.manager [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Refreshing instance network info cache due to event network-changed-edb0c571-b584-4142-b2fd-63d7397c788c. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1123.140685] env[62476]: DEBUG oslo_concurrency.lockutils [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] Acquiring lock "refresh_cache-3cdef023-ce78-4c3b-8476-5508c18204c2" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1123.140829] env[62476]: DEBUG oslo_concurrency.lockutils [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] Acquired lock "refresh_cache-3cdef023-ce78-4c3b-8476-5508c18204c2" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.141301] env[62476]: DEBUG nova.network.neutron [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Refreshing network info cache for port edb0c571-b584-4142-b2fd-63d7397c788c {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1123.700699] env[62476]: DEBUG nova.network.neutron [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Updated VIF entry in instance network info cache for port edb0c571-b584-4142-b2fd-63d7397c788c. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1123.701077] env[62476]: DEBUG nova.network.neutron [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Updating instance_info_cache with network_info: [{"id": "edb0c571-b584-4142-b2fd-63d7397c788c", "address": "fa:16:3e:68:8e:f9", "network": {"id": "d5b8b26d-7c38-44fe-b5d3-55488fdd7530", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1259779373-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5fa89a9315b4e52b46ca13dc8223fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f16a5584-aed0-4df4-820b-5e7f15977265", "external-id": "cl2-zone-495", "segmentation_id": 495, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedb0c571-b5", "ovs_interfaceid": "edb0c571-b584-4142-b2fd-63d7397c788c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.711971] env[62476]: DEBUG oslo_concurrency.lockutils [req-9b207adc-2619-454b-8bcb-2cda7075c240 req-dc1f6218-f388-4a1b-9e69-4fafd4b66853 service nova] Releasing lock "refresh_cache-3cdef023-ce78-4c3b-8476-5508c18204c2" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.984113] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1c604d5c-54e0-4dec-9fef-683247e7438c tempest-ServersNegativeTestMultiTenantJSON-1352401850 tempest-ServersNegativeTestMultiTenantJSON-1352401850-project-member] Acquiring lock "6084c1eb-51da-46b8-b0f5-5d41c363e831" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.984435] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1c604d5c-54e0-4dec-9fef-683247e7438c tempest-ServersNegativeTestMultiTenantJSON-1352401850 tempest-ServersNegativeTestMultiTenantJSON-1352401850-project-member] Lock "6084c1eb-51da-46b8-b0f5-5d41c363e831" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.153187] env[62476]: DEBUG oslo_concurrency.lockutils [None req-03241216-c91e-45e8-bb0c-ad6e211d8d39 tempest-ServersV294TestFqdnHostnames-1752262884 tempest-ServersV294TestFqdnHostnames-1752262884-project-member] Acquiring lock "555c7c87-4335-4cb7-9b0b-357c4a832143" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.153638] env[62476]: DEBUG oslo_concurrency.lockutils [None req-03241216-c91e-45e8-bb0c-ad6e211d8d39
tempest-ServersV294TestFqdnHostnames-1752262884 tempest-ServersV294TestFqdnHostnames-1752262884-project-member] Lock "555c7c87-4335-4cb7-9b0b-357c4a832143" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.182744] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c777aede-2539-4fc7-b12f-d7fa4530bf87 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "e77488eb-aad9-491d-95d6-a9cc39ddc2f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.183122] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c777aede-2539-4fc7-b12f-d7fa4530bf87 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "e77488eb-aad9-491d-95d6-a9cc39ddc2f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.895487] env[62476]: WARNING oslo_vmware.rw_handles [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1162.895487] env[62476]: ERROR oslo_vmware.rw_handles [ 1162.896150] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1162.897918] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465
tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1162.898185] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Copying Virtual Disk [datastore1] vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/4bdd7650-f017-4df6-ad52-e0db2c351e50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1162.898487] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-688679a7-8120-4b5a-a859-5afa8e5bb6dc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.906401] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){ [ 1162.906401] env[62476]: value = "task-4319095" [ 1162.906401] env[62476]: _type = "Task" [ 1162.906401] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.916249] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': task-4319095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.417320] env[62476]: DEBUG oslo_vmware.exceptions [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1163.417648] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.418556] env[62476]: ERROR nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1163.418556] env[62476]: Faults: ['InvalidArgument'] [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Traceback (most recent call last): [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] yield resources [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self.driver.spawn(context, instance, image_meta, [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self._fetch_image_if_missing(context, vi) [ 1163.418556] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] image_cache(vi, tmp_image_ds_loc) [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] vm_util.copy_virtual_disk( [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] session._wait_for_task(vmdk_copy_task) [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] return self.wait_for_task(task_ref) [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] return evt.wait() [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] result = hub.switch() [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1163.419181] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] return self.greenlet.switch() [ 1163.419794] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1163.419794] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self.f(*self.args, **self.kw) [ 1163.419794] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1163.419794] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] raise exceptions.translate_fault(task_info.error) [ 1163.419794] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1163.419794] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Faults: ['InvalidArgument'] [ 1163.419794] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] [ 1163.419794] env[62476]: INFO nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Terminating instance [ 1163.420575] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.420764] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1163.421010] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8324dd5-b288-4a74-b5c8-bc569ad600a1 {{(pid=62476) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.423455] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1163.423649] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1163.424395] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79fcc47-bb28-4bf2-9d01-a8705ed528ec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.431983] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1163.432275] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48148afe-c4d7-4c53-a020-ff84cda7c9eb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.434671] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1163.434835] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1163.435879] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05b303dd-e9d7-49e6-a56c-3e3e36aaf42e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.440983] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){ [ 1163.440983] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5272de07-2a50-b5cd-09d0-97d04124eabb" [ 1163.440983] env[62476]: _type = "Task" [ 1163.440983] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.448259] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5272de07-2a50-b5cd-09d0-97d04124eabb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.586408] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1163.586740] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1163.586890] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Deleting the datastore file [datastore1] a918c107-526d-4cb7-a7dd-735a7d6420a4 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.587219] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7371889-6c35-46bb-a432-3f9762dcd4d4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.593975] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){ [ 1163.593975] env[62476]: value = "task-4319097" [ 1163.593975] env[62476]: _type = "Task" [ 1163.593975] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.602336] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': task-4319097, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.952096] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1163.952408] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating directory with path [datastore1] vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1163.952698] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f5602a9-39b8-4cf9-861a-ef1461479b42 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.970267] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Created directory with path [datastore1] vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1163.970583] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Fetch image to [datastore1] vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1163.970808] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1163.971682] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa37ebcb-c7a8-4ac9-ac45-39218fdaef5c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.980204] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5ee7b8-4c1d-450f-9b66-8b5eed3f6a3a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.990553] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e10bd88-71db-4050-9575-cc7df4bc8ce9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.021319] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb9dae7-dcbf-4f56-8386-91b9b8b7ed80 {{(pid=62476) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.028110] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-30656bfe-70c5-4a9a-8019-3467e9dfc2c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.048773] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1164.105936] env[62476]: DEBUG oslo_vmware.api [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': task-4319097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109286} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.106371] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.107564] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1164.107564] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1164.107564] env[62476]: INFO nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Took 0.68 seconds to destroy the instance on the hypervisor. 
[ 1164.110831] env[62476]: DEBUG nova.compute.claims [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1164.111025] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.111260] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.124598] env[62476]: DEBUG oslo_vmware.rw_handles [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1164.189338] env[62476]: DEBUG oslo_vmware.rw_handles [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1164.189555] env[62476]: DEBUG oslo_vmware.rw_handles [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1164.539892] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ca7cfc-ce09-432d-8600-7e7da876ee0c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.547634] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3b0a8d-68f1-490f-bcb7-7b903f7729a8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.576939] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33019a5-6d05-47de-b648-f233acd979bb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.584403] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d139cc77-5d0c-4702-8221-a3a5efeb9ce9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.597589] env[62476]: DEBUG nova.compute.provider_tree [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.608373] env[62476]: DEBUG nova.scheduler.client.report [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1164.659953] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.548s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.660563] env[62476]: ERROR nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1164.660563] env[62476]: Faults: ['InvalidArgument'] [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Traceback (most recent call last): [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1164.660563] env[62476]: 
ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self.driver.spawn(context, instance, image_meta, [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self._fetch_image_if_missing(context, vi) [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] image_cache(vi, tmp_image_ds_loc) [ 1164.660563] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] vm_util.copy_virtual_disk( [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] session._wait_for_task(vmdk_copy_task) [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] return self.wait_for_task(task_ref) [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] return evt.wait() [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] result = hub.switch() [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] return self.greenlet.switch() [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1164.660949] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] self.f(*self.args, **self.kw) [ 1164.661401] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1164.661401] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] raise exceptions.translate_fault(task_info.error) [ 1164.661401] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1164.661401] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Faults: ['InvalidArgument'] [ 1164.661401] env[62476]: ERROR nova.compute.manager [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] [ 1164.661401] env[62476]: DEBUG nova.compute.utils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1164.663322] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Build of instance a918c107-526d-4cb7-a7dd-735a7d6420a4 was re-scheduled: A specified parameter was not correct: fileType [ 1164.663322] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1164.663866] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1164.664088] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1164.664311] env[62476]: DEBUG nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1164.664521] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1165.333990] env[62476]: DEBUG nova.network.neutron [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.378032] env[62476]: INFO nova.compute.manager [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Took 0.71 seconds to deallocate network for instance. [ 1165.822728] env[62476]: INFO nova.scheduler.client.report [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Deleted allocations for instance a918c107-526d-4cb7-a7dd-735a7d6420a4 [ 1165.892442] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1aa44ff8-0dac-4c35-8282-7aa1eb5f15d0 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 516.430s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.897777] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 317.050s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.897777] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "a918c107-526d-4cb7-a7dd-735a7d6420a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.897777] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.898078] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.898078] env[62476]: INFO nova.compute.manager [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Terminating instance [ 1165.899748] env[62476]: DEBUG nova.compute.manager [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1165.899748] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1165.899748] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e4c9d3b0-e5e7-453f-9236-2283491c4b1f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.908437] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3dfcc9-7844-4653-a04f-8609af85832a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.919433] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1165.942080] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a918c107-526d-4cb7-a7dd-735a7d6420a4 could not be found. [ 1165.942384] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1165.942706] env[62476]: INFO nova.compute.manager [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1165.942909] env[62476]: DEBUG oslo.service.loopingcall [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1165.943226] env[62476]: DEBUG nova.compute.manager [-] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1165.943355] env[62476]: DEBUG nova.network.neutron [-] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1165.985377] env[62476]: DEBUG nova.network.neutron [-] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.994998] env[62476]: INFO nova.compute.manager [-] [instance: a918c107-526d-4cb7-a7dd-735a7d6420a4] Took 0.05 seconds to deallocate network for instance. [ 1166.001424] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.001676] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.003081] env[62476]: INFO nova.compute.claims [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1166.027453] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.027661] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances with incomplete migration {{(pid=62476) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1166.139612] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ee183aa0-6cd4-455d-8371-50be3ded8ef2 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "a918c107-526d-4cb7-a7dd-735a7d6420a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.246s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.397514] env[62476]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b02420-2640-4235-9fa7-494334feff47 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.406190] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9e9a49-94d9-4633-86a3-46520247182b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.438384] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7358b9e-d2c0-4c34-b614-3469d36ce54c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.446357] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647f2a84-d9a6-4450-b95f-f6655d1a6499 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.461058] env[62476]: DEBUG nova.compute.provider_tree [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.469683] env[62476]: DEBUG nova.scheduler.client.report [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1166.492148] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.490s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.492714] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1166.535564] env[62476]: DEBUG nova.compute.utils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1166.536886] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1166.538117] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1166.548046] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1166.633296] env[62476]: DEBUG nova.policy [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09d90bccc0934623a56adb8b56bd3737', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0b52d72194b94284b3177648e28aa119', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1166.660951] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1166.708394] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=<?>,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-07-18T15:29:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1166.708648] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1166.708806] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1166.708991] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1166.709154] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1166.709338] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1166.709589] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1166.709758] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1166.709927] env[62476]: DEBUG nova.virt.hardware [None
req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1166.710105] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1166.711358] env[62476]: DEBUG nova.virt.hardware [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1166.711358] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3911dc1-4897-415e-a209-488d0b15c4d4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.719627] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6330fd-523c-40a6-be38-c9f3d6db1dcd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.149693] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Successfully created port: 25bc8784-3f67-41aa-a0ba-e4169f5d68ee {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1167.936781] env[62476]: DEBUG nova.compute.manager [req-ac0305fd-4c75-4615-81e7-cc48887a7920 req-c339d0ed-040c-45c1-832f-1065b66b2b32 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Received event network-vif-plugged-25bc8784-3f67-41aa-a0ba-e4169f5d68ee {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1167.937139] env[62476]: DEBUG oslo_concurrency.lockutils [req-ac0305fd-4c75-4615-81e7-cc48887a7920 req-c339d0ed-040c-45c1-832f-1065b66b2b32 service nova] Acquiring lock "bfd1d3fe-c8ba-4b77-b633-f77010674954-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.937222] env[62476]: DEBUG oslo_concurrency.lockutils [req-ac0305fd-4c75-4615-81e7-cc48887a7920 req-c339d0ed-040c-45c1-832f-1065b66b2b32 service nova] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.937392] env[62476]: DEBUG oslo_concurrency.lockutils [req-ac0305fd-4c75-4615-81e7-cc48887a7920 req-c339d0ed-040c-45c1-832f-1065b66b2b32 service nova] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.937562] env[62476]: DEBUG nova.compute.manager
[req-ac0305fd-4c75-4615-81e7-cc48887a7920 req-c339d0ed-040c-45c1-832f-1065b66b2b32 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] No waiting events found dispatching network-vif-plugged-25bc8784-3f67-41aa-a0ba-e4169f5d68ee {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1167.937729] env[62476]: WARNING nova.compute.manager [req-ac0305fd-4c75-4615-81e7-cc48887a7920 req-c339d0ed-040c-45c1-832f-1065b66b2b32 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Received unexpected event network-vif-plugged-25bc8784-3f67-41aa-a0ba-e4169f5d68ee for instance with vm_state building and task_state spawning. [ 1168.086749] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Successfully updated port: 25bc8784-3f67-41aa-a0ba-e4169f5d68ee {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1168.105196] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquiring lock "refresh_cache-bfd1d3fe-c8ba-4b77-b633-f77010674954" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.105502] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquired lock "refresh_cache-bfd1d3fe-c8ba-4b77-b633-f77010674954" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.105577] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1168.161101] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1169.097239] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Updating instance_info_cache with network_info: [{"id": "25bc8784-3f67-41aa-a0ba-e4169f5d68ee", "address": "fa:16:3e:f0:f2:b6", "network": {"id": "ba218c60-1210-40ff-bed3-4aff8a3ab9af", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1556701813-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b52d72194b94284b3177648e28aa119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25bc8784-3f", "ovs_interfaceid": "25bc8784-3f67-41aa-a0ba-e4169f5d68ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.132716] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Releasing lock "refresh_cache-bfd1d3fe-c8ba-4b77-b633-f77010674954" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.133048] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Instance network_info: |[{"id": "25bc8784-3f67-41aa-a0ba-e4169f5d68ee", "address": "fa:16:3e:f0:f2:b6", "network": {"id": "ba218c60-1210-40ff-bed3-4aff8a3ab9af", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1556701813-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b52d72194b94284b3177648e28aa119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25bc8784-3f", "ovs_interfaceid": "25bc8784-3f67-41aa-a0ba-e4169f5d68ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1169.133490] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:f2:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25bc8784-3f67-41aa-a0ba-e4169f5d68ee', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.141439] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Creating folder: Project (0b52d72194b94284b3177648e28aa119). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1169.142029] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a7122b1-6ece-4568-b439-bac452582111 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.152395] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Created folder: Project (0b52d72194b94284b3177648e28aa119) in parent group-v849485. [ 1169.152594] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Creating folder: Instances. Parent ref: group-v849546. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1169.152821] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8aa7debd-6e58-42d9-af03-2d8418735600 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.162039] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Created folder: Instances in parent group-v849546. [ 1169.162291] env[62476]: DEBUG oslo.service.loopingcall [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1169.162481] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1169.162685] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a050653b-3238-41e5-b6db-29a2314afa0c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.183855] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1169.183855] env[62476]: value = "task-4319100" [ 1169.183855] env[62476]: _type = "Task" [ 1169.183855] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.192588] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319100, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.695619] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319100, 'name': CreateVM_Task, 'duration_secs': 0.366121} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.695969] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1169.697028] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.698110] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.698110] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1169.698110] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-851cf9ef-7827-4741-9600-ffca4942467f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.703397] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Waiting for the task: (returnval){ [ 1169.703397] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52894d85-ffa2-992a-64c6-fc0d31083e22" [ 1169.703397] env[62476]: _type = "Task" [ 1169.703397] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.712670] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52894d85-ffa2-992a-64c6-fc0d31083e22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.962122] env[62476]: DEBUG nova.compute.manager [req-5b575b02-db7e-4369-8306-f403b15b379a req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Received event network-changed-25bc8784-3f67-41aa-a0ba-e4169f5d68ee {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1169.962313] env[62476]: DEBUG nova.compute.manager [req-5b575b02-db7e-4369-8306-f403b15b379a req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Refreshing instance network info cache due to event network-changed-25bc8784-3f67-41aa-a0ba-e4169f5d68ee. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1169.962600] env[62476]: DEBUG oslo_concurrency.lockutils [req-5b575b02-db7e-4369-8306-f403b15b379a req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] Acquiring lock "refresh_cache-bfd1d3fe-c8ba-4b77-b633-f77010674954" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1169.962764] env[62476]: DEBUG oslo_concurrency.lockutils [req-5b575b02-db7e-4369-8306-f403b15b379a req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] Acquired lock "refresh_cache-bfd1d3fe-c8ba-4b77-b633-f77010674954" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.963031] env[62476]: DEBUG nova.network.neutron [req-5b575b02-db7e-4369-8306-f403b15b379a req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Refreshing network info cache for port 25bc8784-3f67-41aa-a0ba-e4169f5d68ee {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1170.027244] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.214566] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.214887] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1170.215128] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.517157] env[62476]: DEBUG nova.network.neutron [req-5b575b02-db7e-4369-8306-f403b15b379a 
req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Updated VIF entry in instance network info cache for port 25bc8784-3f67-41aa-a0ba-e4169f5d68ee. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1170.517507] env[62476]: DEBUG nova.network.neutron [req-5b575b02-db7e-4369-8306-f403b15b379a req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Updating instance_info_cache with network_info: [{"id": "25bc8784-3f67-41aa-a0ba-e4169f5d68ee", "address": "fa:16:3e:f0:f2:b6", "network": {"id": "ba218c60-1210-40ff-bed3-4aff8a3ab9af", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1556701813-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b52d72194b94284b3177648e28aa119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25bc8784-3f", "ovs_interfaceid": "25bc8784-3f67-41aa-a0ba-e4169f5d68ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.532662] env[62476]: DEBUG oslo_concurrency.lockutils [req-5b575b02-db7e-4369-8306-f403b15b379a req-d879e0eb-f875-47bf-8651-358e911609b2 service nova] Releasing lock "refresh_cache-bfd1d3fe-c8ba-4b77-b633-f77010674954" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.037125] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.037523] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1172.037523] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1172.062290] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.062459] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.062640] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.062810] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.062969] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.063157] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.063351] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.063525] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.063681] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.063884] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1172.063948] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1172.064455] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.075132] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.075361] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.075610] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.075770] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1172.076912] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05462f7d-226b-497b-b2fd-ae8dd68b209c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.086419] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca562870-6ee0-427a-b25f-b83be2e98b4f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.102123] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb365aa-03db-4d2c-b81e-8b60bf01620e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.109030] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109fb8ce-7640-4cf7-b29a-54e09e15051e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.137683] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180706MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1172.137821] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1172.137980] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.288214] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.288389] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.288517] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.288643] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.288764] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.288884] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.289011] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.289145] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.289302] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.289375] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1172.305568] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.317858] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f1e79622-36ec-4efa-9b19-d5aeb1b9d57b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.328619] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a18ae56f-62d1-407e-bc7e-47907857e6b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.338676] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.348936] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 51f4fbdd-836c-4645-8e63-af9827234d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.360095] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance da13b71e-709e-4b89-82d7-d4f30c319f9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.369753] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance afb77e4b-c7d1-4743-b9ca-1e729371a334 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.380103] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e23febc5-e647-4640-afbd-bb28c9483283 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.391551] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.401015] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6084c1eb-51da-46b8-b0f5-5d41c363e831 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.413756] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 555c7c87-4335-4cb7-9b0b-357c4a832143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.427616] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e77488eb-aad9-491d-95d6-a9cc39ddc2f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1172.427856] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1172.427999] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1172.443957] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing inventories for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1172.458408] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating ProviderTree inventory for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1172.458596] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1172.469660] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing aggregate associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, aggregates: None {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1172.488071] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing trait associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1172.794745] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e2f239-f8d1-4853-8281-6991985fd4db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.802788] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d970356f-a6c6-4531-8a9d-e2160a4337ed {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.833263] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2267d5b1-3558-49e8-bf14-ffc1d644539f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.841184] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd23003-b8c3-4e91-b771-e41d756281a8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.854624] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.865084] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1172.883257] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1172.883504] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.745s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.846544] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.846544] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1174.028665] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.028910] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.027844] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.028084] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.028403] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.028602] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.028778] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1176.038615] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] There are 0 instances to clean {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1176.651486] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "3cdef023-ce78-4c3b-8476-5508c18204c2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.033380] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.033662] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.721719] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885
tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "6f133a49-bb62-45c6-a014-a2f99766d092" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.722194] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "6f133a49-bb62-45c6-a014-a2f99766d092" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.750773] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "a0490305-7494-4612-843f-bac04dd0f328" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.751133] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a0490305-7494-4612-843f-bac04dd0f328" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.799608] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "a9a06075-ff8e-401e-9b3a-055fb50c2e2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.799838] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a9a06075-ff8e-401e-9b3a-055fb50c2e2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.388382] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_power_states {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.413387] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Getting list of instances from cluster (obj){ [ 1206.413387] env[62476]: value = "domain-c8" [ 1206.413387] env[62476]: _type = "ClusterComputeResource" [ 1206.413387] env[62476]: } {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1206.414857] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-66bfb0ab-1fd6-4c1d-b99b-c0051ebe1b1e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.432243] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Got total of 10 instances {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1206.432441] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid ebd0c337-82cd-4d0a-9089-b9e2c72c417d {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.432610] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 029e3122-7587-4675-b9d9-47cf8ffdbd1d {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.432770] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.432927] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 0561164b-f3f9-446f-b597-4b6d16a32a00 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.433120] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 1e005b4d-7f94-4263-ba5d-303af209c408 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.433275] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid f4e97733-101b-46dd-aec4-a3287b120eb0 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.433423] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.433569] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid eca46087-33a7-4e9d-a7ce-6094886704a1 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.433902] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 3cdef023-ce78-4c3b-8476-5508c18204c2 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.434050] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid bfd1d3fe-c8ba-4b77-b633-f77010674954 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1206.434342] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.434579] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.434959] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.434959] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "0561164b-f3f9-446f-b597-4b6d16a32a00" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.435181] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "1e005b4d-7f94-4263-ba5d-303af209c408" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.435380] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "f4e97733-101b-46dd-aec4-a3287b120eb0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.435569] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.436050] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "eca46087-33a7-4e9d-a7ce-6094886704a1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.436050] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "3cdef023-ce78-4c3b-8476-5508c18204c2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.436205] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.915985] env[62476]: WARNING oslo_vmware.rw_handles [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed 
connection without response [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1212.915985] env[62476]: ERROR oslo_vmware.rw_handles [ 1212.915985] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1212.916790] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1212.916790] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Copying Virtual Disk [datastore1] vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/80244ada-a120-4714-a7d1-c28bdbdc255c/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1212.917049] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34b64512-6bcf-4c65-bb12-c1da5541f30f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.927876] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){ [ 1212.927876] env[62476]: value = "task-4319101" [ 1212.927876] env[62476]: _type = "Task" [ 1212.927876] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.937348] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': task-4319101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.438077] env[62476]: DEBUG oslo_vmware.exceptions [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1213.438288] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1213.438802] env[62476]: ERROR nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1213.438802] env[62476]: Faults: ['InvalidArgument'] [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Traceback (most recent call last): [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] yield resources [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self.driver.spawn(context, instance, image_meta, [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self._fetch_image_if_missing(context, vi) [ 1213.438802] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] image_cache(vi, tmp_image_ds_loc) [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] vm_util.copy_virtual_disk( [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] session._wait_for_task(vmdk_copy_task) [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] return self.wait_for_task(task_ref) [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] return evt.wait() [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] result = hub.switch() [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1213.439234] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] return self.greenlet.switch() [ 1213.439790] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1213.439790] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self.f(*self.args, **self.kw) [ 1213.439790] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1213.439790] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] raise exceptions.translate_fault(task_info.error) [ 1213.439790] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1213.439790] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Faults: ['InvalidArgument'] [ 1213.439790] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] [ 1213.439790] env[62476]: INFO nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Terminating instance [ 1213.440857] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.441073] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1213.441239] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb0a15b4-f922-4b0f-8308-ea9d1d49f8c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.443521] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1213.443732] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1213.444540] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e9e39a-078b-4782-9390-0bc72ed753c5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.451677] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1213.451949] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3911692-b064-4205-b797-cad092780a81 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.456580] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1213.456786] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1213.457548] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8be8a1b7-0754-4df8-a84a-8342f8349281 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.462827] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Waiting for the task: (returnval){ [ 1213.462827] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]520aa130-14e0-a596-35e8-891b996dd6ec" [ 1213.462827] env[62476]: _type = "Task" [ 1213.462827] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.473152] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]520aa130-14e0-a596-35e8-891b996dd6ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.974960] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1213.975319] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Creating directory with path [datastore1] vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1213.975626] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f8cd82e-1a5e-4370-ad84-cc9bc225f641 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.002940] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Created directory with path [datastore1] vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1214.002940] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Fetch image to [datastore1] vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1214.002940] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] 
vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1214.003531] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d628f36-f91a-40a7-8359-2e6d9bcfa9de {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.012594] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588c5e97-b2ec-4d87-bb75-e5f4b7e3060f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.023450] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ee42c2-1d81-4786-843e-68a026fbe660 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.055868] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0accf41d-1fd2-47af-9dec-d7684cbac178 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.063618] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d83de98a-af30-4632-9484-406383fe1f9c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.085490] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1214.144172] env[62476]: DEBUG oslo_vmware.rw_handles [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1214.202846] env[62476]: DEBUG oslo_vmware.rw_handles [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1214.203062] env[62476]: DEBUG oslo_vmware.rw_handles [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1214.742709] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1214.742941] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1214.743106] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Deleting the datastore file [datastore1] ebd0c337-82cd-4d0a-9089-b9e2c72c417d {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.743388] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbacd796-0234-4a4c-8b82-6341ae05fc36 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.751560] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for the task: (returnval){ [ 1214.751560] env[62476]: value = "task-4319103" [ 1214.751560] env[62476]: _type = "Task" [ 1214.751560] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.759712] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': task-4319103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.261011] env[62476]: DEBUG oslo_vmware.api [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Task: {'id': task-4319103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108749} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.261310] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.261508] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1215.261737] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1215.261943] env[62476]: INFO nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Took 1.82 seconds to destroy the instance on the hypervisor. [ 1215.264218] env[62476]: DEBUG nova.compute.claims [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1215.264426] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.264655] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.616543] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b19b39-e59d-4ef8-8b27-76732e46a16b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.624757] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8a2d76-2bae-4376-a97b-d200632baf1a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.656016] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8603aa34-1239-472b-987e-4fb11a71fd8b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.663951] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-06119b32-d281-4a8e-8325-f6fe457ec3d9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.678238] env[62476]: DEBUG nova.compute.provider_tree [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.688683] env[62476]: DEBUG nova.scheduler.client.report [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1215.704193] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.439s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.704734] env[62476]: ERROR nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1215.704734] env[62476]: Faults: ['InvalidArgument'] [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Traceback (most recent call last): [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self.driver.spawn(context, instance, image_meta, [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self._fetch_image_if_missing(context, vi) [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: 
ebd0c337-82cd-4d0a-9089-b9e2c72c417d] image_cache(vi, tmp_image_ds_loc) [ 1215.704734] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] vm_util.copy_virtual_disk( [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] session._wait_for_task(vmdk_copy_task) [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] return self.wait_for_task(task_ref) [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] return evt.wait() [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] result = hub.switch() [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] return self.greenlet.switch() [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1215.705177] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] self.f(*self.args, **self.kw) [ 1215.705566] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1215.705566] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] raise exceptions.translate_fault(task_info.error) [ 1215.705566] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1215.705566] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Faults: ['InvalidArgument'] [ 1215.705566] env[62476]: ERROR nova.compute.manager [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] [ 1215.705566] env[62476]: DEBUG nova.compute.utils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1215.707106] env[62476]: DEBUG 
nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Build of instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d was re-scheduled: A specified parameter was not correct: fileType [ 1215.707106] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1215.707498] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1215.707672] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1215.707839] env[62476]: DEBUG nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1215.708014] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1216.079719] env[62476]: DEBUG nova.network.neutron [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.094623] env[62476]: INFO nova.compute.manager [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Took 0.39 seconds to deallocate network for instance. 
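The failure traced above is one mechanism end to end: nova submits CopyVirtualDisk_Task, oslo.vmware's wait_for_task polls it, vCenter fails the task with "A specified parameter was not correct: fileType", get_fault_class finds no specific exception class ("Fault InvalidArgument not matched") so the error surfaces as a generic VimFaultException, and _do_build_and_run_instance reacts by destroying the half-built VM, aborting the resource claim, deallocating the network, and re-scheduling the build. Below is a minimal sketch of the poll-and-translate step only; TaskInfo and get_task_info are illustrative assumptions, not oslo.vmware's real signatures, and the real code polls on an eventlet looping call rather than sleeping inline.

```python
import time
from dataclasses import dataclass, field


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


@dataclass
class TaskInfo:
    # Hypothetical snapshot of a vCenter task:
    # state is 'running', 'success' or 'error'.
    state: str
    progress: int = 0
    faults: list = field(default_factory=list)
    message: str = ""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it completes, raising on error.

    get_task_info is an assumed callable returning a TaskInfo snapshot,
    standing in for the PropertyCollector reads seen in the log.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # Mirrors _poll_task raising exceptions.translate_fault(...):
            # the vCenter fault list becomes a Python exception, which the
            # compute manager then logs as Faults: ['InvalidArgument'].
            raise VimFaultException(info.faults, info.message)
        time.sleep(poll_interval)
```

Note how the log reflects each branch of that loop: the "progress is 0%" DEBUG lines come from the polling reads, and the traceback ends exactly at the translate-and-raise step inside _poll_task.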
[ 1216.217293] env[62476]: INFO nova.scheduler.client.report [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Deleted allocations for instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d [ 1216.244071] env[62476]: DEBUG oslo_concurrency.lockutils [None req-63e91dbd-21ed-4d5e-98fd-2785618c4220 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 566.362s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.245321] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 366.455s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.245551] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Acquiring lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.245757] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.245936] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.248073] env[62476]: INFO nova.compute.manager [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Terminating instance [ 1216.251667] env[62476]: DEBUG nova.compute.manager [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1216.251870] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1216.252155] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-178215e8-2d90-4d15-8872-3e9c75b6794c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.266315] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a9d1b1-e794-4807-9c47-0926bf138257 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.281032] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1216.305873] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ebd0c337-82cd-4d0a-9089-b9e2c72c417d could not be found. [ 1216.306243] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1216.306474] env[62476]: INFO nova.compute.manager [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1216.306769] env[62476]: DEBUG oslo.service.loopingcall [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1216.307072] env[62476]: DEBUG nova.compute.manager [-] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1216.307245] env[62476]: DEBUG nova.network.neutron [-] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1216.343373] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.343663] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.345267] env[62476]: INFO nova.compute.claims [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1216.351069] env[62476]: DEBUG nova.network.neutron [-] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.359290] env[62476]: INFO nova.compute.manager [-] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] Took 0.05 seconds to deallocate network for instance. [ 1216.486874] env[62476]: DEBUG oslo_concurrency.lockutils [None req-0376f9eb-2878-4d0c-bcb3-07aa222c0ce3 tempest-ServersAdminTestJSON-2083166811 tempest-ServersAdminTestJSON-2083166811-project-member] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.241s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.487850] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.053s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.488646] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ebd0c337-82cd-4d0a-9089-b9e2c72c417d] During sync_power_state the instance has a pending task (deleting). Skip. 
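The "Acquiring lock ... / Lock ... acquired ... waited 10.053s" pair just above is oslo.concurrency's named-lock wrapper at work: _sync_power_states guards each instance with a lock named after its UUID, so the power-state sync for ebd0c337-82cd-4d0a-9089-b9e2c72c417d queued for just over ten seconds behind do_terminate_instance holding the same lock, then found a pending delete and skipped. A minimal sketch of that pattern using the real lockutils.synchronized decorator follows; Nova actually goes through a thin nova.utils wrapper with a lock-name prefix, and the function body here is an assumption.

```python
from oslo_concurrency import lockutils


def sync_power_state(instance_uuid):
    # The decorator serializes every function guarded by the same lock
    # name; its inner() wrapper emits the DEBUG "Acquiring", "acquired
    # ... waited Ns" and '"released" ... held Ns' lines seen throughout
    # this log (lockutils.py:402/407/421).
    @lockutils.synchronized(instance_uuid)
    def query_driver_power_state_and_sync():
        # Compare the hypervisor's power state with the DB record here;
        # skip if the instance has a pending task, as the INFO line
        # above ("pending task (deleting). Skip.") shows.
        pass

    return query_driver_power_state_and_sync()
```

The same decorator explains the other lock names in this section: per-instance build locks in _locked_do_build_and_run_instance, the "compute_resources" lock around claim and abort in the resource tracker, and the per-datastore image-cache locks taken while fetching VMDKs.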
[ 1216.488646] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "ebd0c337-82cd-4d0a-9089-b9e2c72c417d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.790846] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870ec6db-9f40-49db-9f3c-dd1d69b9ad3d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.799160] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3d2d14-775b-4623-ae1d-c0f756c00bec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.830561] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01aa5aea-7d79-44d8-a85d-ac803a1fdf55 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.838970] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f11c67-a82c-46e2-86ef-3dcb722e0ba2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.853236] env[62476]: DEBUG nova.compute.provider_tree [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1216.864637] env[62476]: DEBUG nova.scheduler.client.report [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1216.883785] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.540s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.884413] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1216.922710] env[62476]: DEBUG nova.compute.utils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1216.924639] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1216.924847] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1216.938825] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1216.995479] env[62476]: DEBUG nova.policy [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9648eddc7b4f4faca088386c6fef463f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d2e4aa6fb2247a9adde574f155bb4d2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1217.010337] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1217.116061] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1217.116350] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1217.116511] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1217.116806] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1217.116905] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1217.117071] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1217.117287] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1217.117451] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1217.117621] env[62476]: DEBUG 
nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1217.117786] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1217.117963] env[62476]: DEBUG nova.virt.hardware [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1217.118931] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c383f3-0f83-4ee3-bf89-24b75834aa91 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.128094] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbb1c14-2d8a-42f8-adc4-4c3f6bb5921e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.641426] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Successfully created port: 3b195278-50d7-44d8-8e56-e191ca5d55eb {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1218.425102] env[62476]: DEBUG nova.compute.manager [req-3b98d179-7efb-4b56-ab05-91335d44f0e0 req-b0f59ca3-31a6-4022-abed-1cb9ee37707e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Received event network-vif-plugged-3b195278-50d7-44d8-8e56-e191ca5d55eb {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1218.425397] env[62476]: DEBUG oslo_concurrency.lockutils [req-3b98d179-7efb-4b56-ab05-91335d44f0e0 req-b0f59ca3-31a6-4022-abed-1cb9ee37707e service nova] Acquiring lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.425714] env[62476]: DEBUG oslo_concurrency.lockutils [req-3b98d179-7efb-4b56-ab05-91335d44f0e0 req-b0f59ca3-31a6-4022-abed-1cb9ee37707e service nova] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.425849] env[62476]: DEBUG oslo_concurrency.lockutils [req-3b98d179-7efb-4b56-ab05-91335d44f0e0 req-b0f59ca3-31a6-4022-abed-1cb9ee37707e service nova] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.426143] env[62476]: DEBUG 
nova.compute.manager [req-3b98d179-7efb-4b56-ab05-91335d44f0e0 req-b0f59ca3-31a6-4022-abed-1cb9ee37707e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] No waiting events found dispatching network-vif-plugged-3b195278-50d7-44d8-8e56-e191ca5d55eb {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1218.426354] env[62476]: WARNING nova.compute.manager [req-3b98d179-7efb-4b56-ab05-91335d44f0e0 req-b0f59ca3-31a6-4022-abed-1cb9ee37707e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Received unexpected event network-vif-plugged-3b195278-50d7-44d8-8e56-e191ca5d55eb for instance with vm_state building and task_state spawning. [ 1218.494095] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Successfully updated port: 3b195278-50d7-44d8-8e56-e191ca5d55eb {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1218.510401] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "refresh_cache-5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1218.510549] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired lock "refresh_cache-5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.510695] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1218.573349] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Instance cache missing network info. 
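
The "Received unexpected event" warning above is benign: Neutron delivered network-vif-plugged before anything on the compute side had registered a waiter for it, so pop_instance_event found nothing to dispatch. The underlying pattern is a per-(event, tag) rendezvous; a stripped-down sketch using plain threading (illustrative only, Nova's real InstanceEvents is eventlet-based):

    import threading

    class InstanceEvents:
        """Per-(event-name, tag) rendezvous, loosely modeled on Nova's."""

        def __init__(self):
            self._waiters = {}                 # (name, tag) -> Event
            self._lock = threading.Lock()

        def prepare(self, name, tag):
            # Called by the side that will wait (e.g. before plugging a VIF).
            with self._lock:
                return self._waiters.setdefault((name, tag), threading.Event())

        def pop(self, name, tag):
            # Called when the external event arrives from Neutron.
            with self._lock:
                evt = self._waiters.pop((name, tag), None)
            if evt is None:
                return False    # the "unexpected event" case in the log
            evt.set()
            return True

    events = InstanceEvents()
    print(events.pop('network-vif-plugged', '3b195278'))  # False: no waiter yet
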
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1218.798083] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Updating instance_info_cache with network_info: [{"id": "3b195278-50d7-44d8-8e56-e191ca5d55eb", "address": "fa:16:3e:1e:8f:dc", "network": {"id": "c3805f6e-10c5-494e-ba39-480d3c8914dd", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-489017107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2e4aa6fb2247a9adde574f155bb4d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b195278-50", "ovs_interfaceid": "3b195278-50d7-44d8-8e56-e191ca5d55eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.814252] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Releasing lock "refresh_cache-5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.814566] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Instance network_info: |[{"id": "3b195278-50d7-44d8-8e56-e191ca5d55eb", "address": "fa:16:3e:1e:8f:dc", "network": {"id": "c3805f6e-10c5-494e-ba39-480d3c8914dd", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-489017107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2e4aa6fb2247a9adde574f155bb4d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b195278-50", "ovs_interfaceid": "3b195278-50d7-44d8-8e56-e191ca5d55eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1218.814995] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:8f:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b195278-50d7-44d8-8e56-e191ca5d55eb', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1218.823011] env[62476]: DEBUG oslo.service.loopingcall [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1218.823562] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1218.823805] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae0e9af4-a74a-435d-b349-4b27e11b53ce {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.845011] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1218.845011] env[62476]: value = "task-4319104" [ 1218.845011] env[62476]: _type = "Task" [ 1218.845011] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.855015] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319104, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.355589] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319104, 'name': CreateVM_Task, 'duration_secs': 0.315765} completed successfully. 
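
CreateVM_Task above goes from 0% to done in about 0.32s; oslo.vmware hands back a task reference and polls its state until success or error. The shape of that loop, reduced to a generic sketch (get_task_info is a stand-in for the PropertyCollector read the real code performs):

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # Poll a vCenter-style task dict until it leaves queued/running.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)
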
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.356212] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1219.356902] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1219.357142] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.357480] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1219.357743] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2942c9b-9d4b-4dd5-8fce-b6fc800ca12d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.363055] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){ [ 1219.363055] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5255b5e2-1b9c-0272-4c9e-020422d21dcd" [ 1219.363055] env[62476]: _type = "Task" [ 1219.363055] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.371211] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5255b5e2-1b9c-0272-4c9e-020422d21dcd, 'name': SearchDatastore_Task} progress is 0%. 
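
The lock and "external semaphore" around [datastore1] devstack-image-cache_base/<image-id> serialize concurrent fetches of the same Glance image into the datastore cache, so two spawns of the same image do not both download it. oslo.concurrency provides the primitive; a small sketch of the pattern (ensure_cached, is_cached and fetch_image are hypothetical stand-ins):

    from oslo_concurrency import lockutils

    def ensure_cached(datastore, image_id, is_cached, fetch_image):
        # One lock per (datastore, image): a second spawn of the same image
        # blocks here, then finds the cache populated and skips the fetch.
        name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(name):
            if not is_cached(datastore, image_id):
                fetch_image(datastore, image_id)
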
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.873436] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1219.873722] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1219.875859] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.454139] env[62476]: DEBUG nova.compute.manager [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Received event network-changed-3b195278-50d7-44d8-8e56-e191ca5d55eb {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1220.454423] env[62476]: DEBUG nova.compute.manager [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Refreshing instance network info cache due to event network-changed-3b195278-50d7-44d8-8e56-e191ca5d55eb. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1220.454542] env[62476]: DEBUG oslo_concurrency.lockutils [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] Acquiring lock "refresh_cache-5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.454876] env[62476]: DEBUG oslo_concurrency.lockutils [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] Acquired lock "refresh_cache-5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.455087] env[62476]: DEBUG nova.network.neutron [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Refreshing network info cache for port 3b195278-50d7-44d8-8e56-e191ca5d55eb {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1221.029225] env[62476]: DEBUG nova.network.neutron [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Updated VIF entry in instance network info cache for port 3b195278-50d7-44d8-8e56-e191ca5d55eb. 
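
The cache entry being refreshed (dumped in full just below) is a JSON list of VIF dicts: port id, MAC, the network with its subnets and fixed IPs, and backend details such as the NSX logical switch. A small helper that pulls out the fields an operator usually wants (illustrative):

    import json

    def summarize_vifs(network_info_json):
        # network_info_json: the serialized instance_info_cache payload.
        out = []
        for vif in json.loads(network_info_json):
            ips = [ip['address']
                   for subnet in vif['network']['subnets']
                   for ip in subnet['ips']]
            out.append({'port': vif['id'], 'mac': vif['address'],
                        'ips': ips, 'type': vif['type']})
        return out

    # For the entry below this yields:
    # [{'port': '3b195278-...', 'mac': 'fa:16:3e:1e:8f:dc',
    #   'ips': ['192.168.128.12'], 'type': 'ovs'}]
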
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1221.029584] env[62476]: DEBUG nova.network.neutron [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Updating instance_info_cache with network_info: [{"id": "3b195278-50d7-44d8-8e56-e191ca5d55eb", "address": "fa:16:3e:1e:8f:dc", "network": {"id": "c3805f6e-10c5-494e-ba39-480d3c8914dd", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-489017107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2e4aa6fb2247a9adde574f155bb4d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b195278-50", "ovs_interfaceid": "3b195278-50d7-44d8-8e56-e191ca5d55eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.039874] env[62476]: DEBUG oslo_concurrency.lockutils [req-d9104800-0f7e-4b61-b695-1621e9641173 req-880f9268-e7a9-4e1c-8833-712ac0b90d8e service nova] Releasing lock "refresh_cache-5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1224.344375] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquiring lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.255934] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "139391d4-af04-4053-801a-792fc4fd724a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.256370] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "139391d4-af04-4053-801a-792fc4fd724a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.027600] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.027945] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1232.027945] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1232.050928] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051099] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051239] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051367] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051491] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051616] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051737] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051857] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.051975] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. 
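
_heal_instance_info_cache walks this host's instances looking for one whose network cache is stale, but deliberately skips anything still in the Building state: the in-flight spawn is populating that cache itself. With every candidate skipped, the run ends with "Didn't find any instances", as the next entry shows. The filter, compressed to a sketch (illustrative, not the real method):

    def pick_instance_to_heal(instances):
        for inst in instances:
            if inst['vm_state'] == 'building':
                continue          # spawn in progress owns the cache
            return inst           # first steady-state instance wins
        return None               # nothing to heal this cycle
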
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.052106] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1232.052227] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1232.053026] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.063674] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.063886] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.064066] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.064414] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1232.065474] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f098f2fe-8404-4788-be15-dcdbd66c78f8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.074343] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034fda6d-2fb0-4e31-b6fe-b081ff806dfb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.088800] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdf39a0-a4d2-4185-a31c-1cd0219744bc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.095778] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5939786-3c0b-4542-a50a-6d2351ad6079 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.125775] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node 
resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180692MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1232.125963] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.126196] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.207036] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207036] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207036] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207036] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207283] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207283] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
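
Every allocation above is {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}: all of these instances are m1.nano. With ten of them actively managed and 512 MB reserved for the host, the totals the tracker reports further down follow directly:

    # Arithmetic behind the "Final resource view" a few entries below.
    per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
    n, reserved_mb = 10, 512
    print('used_ram =', reserved_mb + n * per_instance['MEMORY_MB'], 'MB')  # 1792
    print('used_disk =', n * per_instance['DISK_GB'], 'GB')                 # 10
    print('used_vcpus =', n * per_instance['VCPU'])                         # 10
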
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207283] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207283] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207508] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.207615] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1232.220759] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f1e79622-36ec-4efa-9b19-d5aeb1b9d57b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.232441] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a18ae56f-62d1-407e-bc7e-47907857e6b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.244187] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.255236] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 51f4fbdd-836c-4645-8e63-af9827234d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.266539] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance da13b71e-709e-4b89-82d7-d4f30c319f9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.278036] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance afb77e4b-c7d1-4743-b9ca-1e729371a334 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.286674] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e23febc5-e647-4640-afbd-bb28c9483283 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.297105] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.309779] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6084c1eb-51da-46b8-b0f5-5d41c363e831 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.320446] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 555c7c87-4335-4cb7-9b0b-357c4a832143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.331663] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e77488eb-aad9-491d-95d6-a9cc39ddc2f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.342736] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.353636] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.364474] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a9a06075-ff8e-401e-9b3a-055fb50c2e2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.374465] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
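
The audit sorts placement allocations into three buckets: instances actively managed here (allocations kept and counted), instances scheduled to this host that have not started yet (heal skipped, as in the run above), and instances that no longer exist (allocations removed; none in this pass). The branching, reduced to a sketch (illustrative, not the real _remove_deleted_instances_allocations):

    def audit_allocation(instance, allocation):
        if instance is None:
            return 'remove'      # deleted instance: free its allocation
        if instance['host'] is None:
            return 'skip-heal'   # scheduled here but not yet started
        return 'keep'            # actively managed on this host
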
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1232.374722] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1232.374860] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1232.709947] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3524638e-f0d4-47e7-bfb5-eb08d80b7fc0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.718214] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3550230c-6782-4fe2-b490-90f57fd0fb9f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.748762] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f942cc-dfc2-4de7-aa68-07e22f5e0752 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.756753] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79e3e5d-a5ba-4ac6-9a13-5b873f145364 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.770624] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.779815] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1232.799825] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1232.800037] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.674s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.775109] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.775508] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.775551] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.775678] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1236.028419] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.026614] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.026884] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1239.022399] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.504653] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.636792] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2d57938c-8622-4a47-8f6d-5e247a1ec9d5 tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Acquiring lock "0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.637137] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2d57938c-8622-4a47-8f6d-5e247a1ec9d5 tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Lock "0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.295124] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5f7f8ae1-8749-4db3-9638-cd56cee92e2a tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Acquiring lock "5c5fe542-5362-4fe9-a359-ea3eac825ca0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.295397] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5f7f8ae1-8749-4db3-9638-cd56cee92e2a tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Lock "5c5fe542-5362-4fe9-a359-ea3eac825ca0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.695350] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5be2cd61-1b12-4e36-ae96-1ce6b355ecda tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Acquiring lock "b304faf2-127c-4185-89c4-84093c81cf6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.695629] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5be2cd61-1b12-4e36-ae96-1ce6b355ecda tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Lock "b304faf2-127c-4185-89c4-84093c81cf6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.438168] env[62476]: WARNING oslo_vmware.rw_handles [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1259.438168] env[62476]: ERROR oslo_vmware.rw_handles [ 1259.438881] 
env[62476]: DEBUG nova.virt.vmwareapi.images [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1259.440682] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1259.440939] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Copying Virtual Disk [datastore1] vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/64e62849-b155-4eca-8493-6f022a2f5109/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1259.441242] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75eb1cd8-bfdf-4308-96db-b989eecad817 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.448998] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Waiting for the task: (returnval){ [ 1259.448998] env[62476]: value = "task-4319105" [ 1259.448998] env[62476]: _type = "Task" [ 1259.448998] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.459315] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Task: {'id': task-4319105, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.963048] env[62476]: DEBUG oslo_vmware.exceptions [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Fault InvalidArgument not matched. 
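
"Fault InvalidArgument not matched" means oslo.vmware found no dedicated exception class registered for that VIM fault name, so the CopyVirtualDisk_Task failure surfaces as the generic VimFaultException carried through the traceback below. Callers can still branch on the attached fault list; a sketch (run_copy is illustrative, not driver code):

    from oslo_vmware import exceptions as vexc

    def run_copy(session, copy_task):
        try:
            session.wait_for_task(copy_task)
        except vexc.VimFaultException as e:
            # fault_list survives even without a dedicated exception class.
            if 'InvalidArgument' in (e.fault_list or []):
                # e.g. the "fileType" parameter fault seen in this log
                raise RuntimeError('disk copy rejected: %s' % e)
            raise
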
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1259.963239] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.963812] env[62476]: ERROR nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1259.963812] env[62476]: Faults: ['InvalidArgument'] [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Traceback (most recent call last): [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] yield resources [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self.driver.spawn(context, instance, image_meta, [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._fetch_image_if_missing(context, vi) [ 1259.963812] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] image_cache(vi, tmp_image_ds_loc) [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] vm_util.copy_virtual_disk( [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] session._wait_for_task(vmdk_copy_task) [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.wait_for_task(task_ref) [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return evt.wait() [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] result = hub.switch() [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1259.964293] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.greenlet.switch() [ 1259.964697] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1259.964697] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self.f(*self.args, **self.kw) [ 1259.964697] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1259.964697] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] raise exceptions.translate_fault(task_info.error) [ 1259.964697] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1259.964697] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Faults: ['InvalidArgument'] [ 1259.964697] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] [ 1259.964697] env[62476]: INFO nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Terminating instance [ 1259.965751] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.965972] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1259.966513] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 
tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.966676] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquired lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.966843] env[62476]: DEBUG nova.network.neutron [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1259.967825] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-500f3eaa-ec1c-4b23-bf92-fe4c5976f13f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.977266] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1259.977474] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1259.978857] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc286ec-6391-4645-b26f-042a5be569b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.987402] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Waiting for the task: (returnval){ [ 1259.987402] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5294d889-1f0c-babb-17fc-0bd2f37631a0" [ 1259.987402] env[62476]: _type = "Task" [ 1259.987402] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.995855] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5294d889-1f0c-babb-17fc-0bd2f37631a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.037952] env[62476]: DEBUG nova.network.neutron [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1260.450547] env[62476]: DEBUG nova.network.neutron [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.460288] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Releasing lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.460747] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1260.460950] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1260.462103] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e0cbb5-b0a5-47d4-a40a-787084fb4ed1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.471089] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1260.471344] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ce28b91-9330-49bb-8eee-e77cb6adcea8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.497719] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1260.497995] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Creating directory with path [datastore1] vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1260.498262] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1aa8ea4-5cd3-4997-98af-fb3a08bb7499 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.515184] env[62476]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Created directory with path [datastore1] vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1260.515377] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Fetch image to [datastore1] vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1260.515620] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1260.515875] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1260.519290] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1260.519290] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Deleting the datastore file [datastore1] 029e3122-7587-4675-b9d9-47cf8ffdbd1d {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1260.519290] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e8f217-62cd-4203-b52a-1da43c74cab1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.521321] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3068a592-6aba-424f-8073-439cd56412a4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.528563] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c533df7e-0886-4b65-80a2-f014dfa802c2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.531059] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Waiting for the 
task: (returnval){ [ 1260.531059] env[62476]: value = "task-4319107" [ 1260.531059] env[62476]: _type = "Task" [ 1260.531059] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.541067] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1913666c-48e4-4d68-bf49-a0ba9fd970cb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.548506] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Task: {'id': task-4319107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.577726] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5800013f-772a-431e-9c6d-e087989ba1c9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.584128] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-39f402c6-862e-4bc9-b270-d45cda64f611 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.610135] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1260.796142] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1260.871716] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1260.871984] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1261.041740] env[62476]: DEBUG oslo_vmware.api [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Task: {'id': task-4319107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.046227} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.042038] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.042250] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1261.042445] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1261.042636] env[62476]: INFO nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Took 0.58 seconds to destroy the instance on the hypervisor. [ 1261.042921] env[62476]: DEBUG oslo.service.loopingcall [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1261.043147] env[62476]: DEBUG nova.compute.manager [-] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network deallocation for instance since networking was not requested.
{{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1261.045510] env[62476]: DEBUG nova.compute.claims [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1261.045685] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.045896] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.489715] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe6b338-90cb-4f7f-abc6-1ef0326a364e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.497669] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe024bba-b324-4b42-8030-d7e9421df3c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.531519] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6186e3b-3d7f-4307-8ead-8f3f5e0448d9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.541060] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207a3d79-71d4-4b3c-8b82-2748b7066358 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.556673] env[62476]: DEBUG nova.compute.provider_tree [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.572781] env[62476]: DEBUG nova.scheduler.client.report [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1261.589287] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.543s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.589943] env[62476]: ERROR nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1261.589943] env[62476]: Faults: ['InvalidArgument'] [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Traceback (most recent call last): [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self.driver.spawn(context, instance, image_meta, [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._fetch_image_if_missing(context, vi) [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] image_cache(vi, tmp_image_ds_loc) [ 1261.589943] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] vm_util.copy_virtual_disk( [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] session._wait_for_task(vmdk_copy_task) [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.wait_for_task(task_ref) [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return 
evt.wait() [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] result = hub.switch() [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.greenlet.switch() [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1261.590376] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self.f(*self.args, **self.kw) [ 1261.590828] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1261.590828] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] raise exceptions.translate_fault(task_info.error) [ 1261.590828] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1261.590828] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Faults: ['InvalidArgument'] [ 1261.590828] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] [ 1261.590828] env[62476]: DEBUG nova.compute.utils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1261.592943] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Build of instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d was re-scheduled: A specified parameter was not correct: fileType [ 1261.592943] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1261.593349] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1261.593582] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1261.595563] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 
tempest-ServersAdmin275Test-478483510-project-member] Acquired lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.595563] env[62476]: DEBUG nova.network.neutron [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1261.644942] env[62476]: DEBUG nova.network.neutron [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1261.756215] env[62476]: DEBUG nova.network.neutron [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.769008] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Releasing lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1261.769451] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1261.769451] env[62476]: DEBUG nova.compute.manager [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1261.877604] env[62476]: INFO nova.scheduler.client.report [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Deleted allocations for instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d [ 1261.902397] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e932227f-63d6-4015-8b0e-2f1bf5ad0cae tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 609.034s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.903605] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 408.684s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.903894] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.904148] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.904329] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.906339] env[62476]: INFO nova.compute.manager [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Terminating instance [ 1261.908230] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquiring lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1261.908391] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Acquired lock
"refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.908604] env[62476]: DEBUG nova.network.neutron [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1261.934378] env[62476]: DEBUG nova.compute.manager [None req-02a346b9-a13a-499c-a526-a8a3bfd75b23 tempest-InstanceActionsNegativeTestJSON-1079402422 tempest-InstanceActionsNegativeTestJSON-1079402422-project-member] [instance: f1e79622-36ec-4efa-9b19-d5aeb1b9d57b] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1261.938420] env[62476]: DEBUG nova.network.neutron [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1261.974649] env[62476]: DEBUG nova.compute.manager [None req-02a346b9-a13a-499c-a526-a8a3bfd75b23 tempest-InstanceActionsNegativeTestJSON-1079402422 tempest-InstanceActionsNegativeTestJSON-1079402422-project-member] [instance: f1e79622-36ec-4efa-9b19-d5aeb1b9d57b] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1262.001324] env[62476]: DEBUG oslo_concurrency.lockutils [None req-02a346b9-a13a-499c-a526-a8a3bfd75b23 tempest-InstanceActionsNegativeTestJSON-1079402422 tempest-InstanceActionsNegativeTestJSON-1079402422-project-member] Lock "f1e79622-36ec-4efa-9b19-d5aeb1b9d57b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.030s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.017416] env[62476]: DEBUG nova.compute.manager [None req-fa9d7499-add2-456f-99d9-5a05c3a1b094 tempest-ServersTestManualDisk-1677104424 tempest-ServersTestManualDisk-1677104424-project-member] [instance: a18ae56f-62d1-407e-bc7e-47907857e6b0] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1262.046993] env[62476]: DEBUG nova.compute.manager [None req-fa9d7499-add2-456f-99d9-5a05c3a1b094 tempest-ServersTestManualDisk-1677104424 tempest-ServersTestManualDisk-1677104424-project-member] [instance: a18ae56f-62d1-407e-bc7e-47907857e6b0] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1262.063689] env[62476]: DEBUG nova.network.neutron [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.075731] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Releasing lock "refresh_cache-029e3122-7587-4675-b9d9-47cf8ffdbd1d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1262.076283] env[62476]: DEBUG nova.compute.manager [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1262.076375] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1262.077050] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4251d6fe-5491-4636-9cdb-465933fdf986 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.081575] env[62476]: DEBUG oslo_concurrency.lockutils [None req-fa9d7499-add2-456f-99d9-5a05c3a1b094 tempest-ServersTestManualDisk-1677104424 tempest-ServersTestManualDisk-1677104424-project-member] Lock "a18ae56f-62d1-407e-bc7e-47907857e6b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.043s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.088846] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3760bd79-0c29-4922-a391-ad08dfec0760 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.101880] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1262.126584] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 029e3122-7587-4675-b9d9-47cf8ffdbd1d could not be found.
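Annotation: the WARNING above is benign. By the time do_terminate_instance runs, the earlier re-scheduled build has already unregistered the VM and deleted its datastore files, so the destroy path treats InstanceNotFound as success and the teardown still converges (the "Instance destroyed" record follows immediately). A minimal, self-contained Python sketch of that tolerant-destroy pattern; the registry and helpers below are illustrative stand-ins, not Nova's actual vmops internals:

    import logging

    logging.basicConfig(level=logging.INFO)
    LOG = logging.getLogger("vmops-sketch")

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    BACKEND_VMS = {}  # hypothetical vCenter-side state: uuid -> VM record

    def find_vm_by_uuid(uuid):
        # Mirrors the SearchIndex.FindAllByUuid lookup invoked above.
        if uuid not in BACKEND_VMS:
            raise InstanceNotFound(uuid)
        return BACKEND_VMS[uuid]

    def destroy_instance(uuid):
        """Destroy is idempotent: a VM missing on the backend counts as done."""
        try:
            find_vm_by_uuid(uuid)
            BACKEND_VMS.pop(uuid)  # unregister + datastore cleanup, collapsed here
        except InstanceNotFound:
            # Already gone (e.g. removed by the earlier failed build);
            # warn and continue instead of failing the terminate request.
            LOG.warning("Instance does not exist on backend: %s", uuid)
        LOG.info("Instance destroyed")

    destroy_instance("029e3122-7587-4675-b9d9-47cf8ffdbd1d")

Making destroy converge even when the backend object is gone is what lets the terminate request above complete and proceed to network deallocation.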
[ 1262.126584] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1262.126584] env[62476]: INFO nova.compute.manager [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1262.126584] env[62476]: DEBUG oslo.service.loopingcall [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1262.126584] env[62476]: DEBUG nova.compute.manager [-] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1262.127053] env[62476]: DEBUG nova.network.neutron [-] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1262.172349] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.172507] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.174116] env[62476]: INFO nova.compute.claims [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1262.360418] env[62476]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62476) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1262.360697] env[62476]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized
exception. [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-b3f05f9b-8a0f-4279-9adf-3c81eda4c2ac'] [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1262.362396] env[62476]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1262.362933] env[62476]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1262.362933] env[62476]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1262.363548] env[62476]: ERROR oslo.service.loopingcall [ 1262.364069] env[62476]: ERROR nova.compute.manager [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1262.403292] env[62476]: ERROR nova.compute.manager [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
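Annotation: the traceback that follows shows how the 401 surfaces. Every Neutron call in nova/network/neutron.py goes through a wrapper (line 196 in the frames above) that proxies the client method, and when the admin client gets an Unauthorized back it is re-raised (line 212) as NeutronAdminCredentialConfigurationInvalid, pointing the operator at the credentials in nova.conf rather than at the instance. A minimal Python sketch of that exception-translation pattern, with stand-in exception classes; the decorator wiring is illustrative, not Nova's exact code:

    import functools

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class named in the log."""

    def translate_neutron_errors(func):
        # Proxy the client call; an Unauthorized from the *admin* client can
        # only mean bad credentials, so translate it into a config error.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper

    @translate_neutron_errors
    def list_ports(**search_opts):
        # Simulate the keystone rejection logged at 1262.360418 above.
        raise Unauthorized("401 - The request you have made requires authentication.")

    try:
        list_ports(device_id="029e3122-7587-4675-b9d9-47cf8ffdbd1d")
    except NeutronAdminCredentialConfigurationInvalid:
        print("Networking client is experiencing an unauthorized exception.")

Translating the error at the wrapper boundary is why the instance lands in ERROR with a configuration message instead of a raw 401: the retrying loopingcall sees a non-retryable fault and gives up immediately.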
[ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Traceback (most recent call last): [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] ret = obj(*args, **kwargs) [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] exception_handler_v20(status_code, error_body) [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] raise client_exc(message=error_message, [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Neutron server returns request_ids: ['req-b3f05f9b-8a0f-4279-9adf-3c81eda4c2ac'] [ 1262.403292] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] During handling of the above exception, another exception occurred: [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Traceback (most recent call last): [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._delete_instance(context, instance, bdms) [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._shutdown_instance(context, instance, bdms) [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._try_deallocate_network(context, instance, requested_networks) [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] with excutils.save_and_reraise_exception(): [ 1262.403789] env[62476]: ERROR 
nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.403789] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self.force_reraise() [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] raise self.value [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] _deallocate_network_with_retries() [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return evt.wait() [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] result = hub.switch() [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.greenlet.switch() [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1262.404226] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] result = func(*self.args, **self.kw) [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] result = f(*args, **kwargs) [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._deallocate_network( [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self.network_api.deallocate_for_instance( [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 
029e3122-7587-4675-b9d9-47cf8ffdbd1d] data = neutron.list_ports(**search_opts) [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] ret = obj(*args, **kwargs) [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.list('ports', self.ports_path, retrieve_all, [ 1262.404616] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] ret = obj(*args, **kwargs) [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] for r in self._pagination(collection, path, **params): [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] res = self.get(path, params=params) [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] ret = obj(*args, **kwargs) [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.retry_request("GET", action, body=body, [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] ret = obj(*args, **kwargs) [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1262.405018] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] return self.do_request(method, action, body=body, [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] ret = obj(*args, **kwargs) [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] self._handle_fault_response(status_code, replybody, resp) [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1262.405433] env[62476]: ERROR nova.compute.manager [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] [ 1262.446676] env[62476]: DEBUG oslo_concurrency.lockutils [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.543s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.450360] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 56.016s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.450570] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] During sync_power_state the instance has a pending task (deleting). Skip. [ 1262.450756] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "029e3122-7587-4675-b9d9-47cf8ffdbd1d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.515998] env[62476]: INFO nova.compute.manager [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] [instance: 029e3122-7587-4675-b9d9-47cf8ffdbd1d] Successfully reverted task state from None on failure for instance. [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server [None req-77554d67-d3b3-4a78-8d67-9eafd34cd8f6 tempest-ServersAdmin275Test-478483510 tempest-ServersAdmin275Test-478483510-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-b3f05f9b-8a0f-4279-9adf-3c81eda4c2ac'] [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1262.523275] env[62476]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.523821] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1262.524374] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1262.524897] env[62476]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1262.525452] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.525452] env[62476]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1262.526059] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1262.526642] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1262.526642] env[62476]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1262.526642] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1262.526642] env[62476]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1262.526642] env[62476]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
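[editor's note] Nearly every intermediate frame in these tracebacks alternates with oslo_utils/excutils.py lines 227 (__exit__) and 200 (force_reraise): each layer of the teardown path runs its cleanup inside save_and_reraise_exception() and then re-raises the original error. A minimal sketch of that usage (the helper names below are illustrative):

    import logging

    from oslo_utils import excutils

    LOG = logging.getLogger(__name__)

    def deallocate_network(instance):
        # Stand-in for the call that fails with the 401 above.
        raise RuntimeError("simulated unauthorized failure")

    def shutdown_instance(instance):
        try:
            deallocate_network(instance)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup and logging run here; when the context manager
                # exits it calls force_reraise(), restoring the saved
                # exception, hence the repeated excutils.py:227/200 frames.
                LOG.error("Failed to deallocate network for %s", instance)

This is why a single Unauthorized response produces such a deep traceback: the exception is saved and re-raised once per layer (terminate_instance, do_terminate_instance, _delete_instance, _shutdown_instance, _try_deallocate_network) on its way back to the RPC server.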
[ 1262.526642] env[62476]: ERROR oslo_messaging.rpc.server [ 1262.599985] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe89798-0e6e-42d8-b5a7-b5d0e51fb339 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.608444] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680b955e-ce09-4428-952f-6db80b66e807 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.638960] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efa4baf-a7d9-44f9-9708-01544b0302fa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.647145] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc31f54c-28a0-4e19-bc9b-e731baef59c0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.662028] env[62476]: DEBUG nova.compute.provider_tree [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.670120] env[62476]: DEBUG nova.scheduler.client.report [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1262.687156] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.515s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.687701] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1262.721771] env[62476]: DEBUG nova.compute.utils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1262.723360] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1262.723652] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1262.735571] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1262.797736] env[62476]: DEBUG nova.policy [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9e4673294b1477d93bdae5dfef42927', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16d034f4180f4aeaa8f880c3e6767730', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1262.805041] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1262.831712] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1262.831962] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1262.832156] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1262.832406] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1262.832568] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1262.832721] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1262.832928] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1262.833132] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1262.833316] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 
tempest-ImagesTestJSON-1877863561-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1262.833481] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1262.833658] env[62476]: DEBUG nova.virt.hardware [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1262.834568] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534ad3af-f010-4e5a-a71c-9f7fd994d23e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.842961] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2b3f07-e935-4e79-ad62-9ebb4e89bd87 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.184884] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Successfully created port: 57cea09c-1bb1-4c67-9505-b243d84c343b {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1263.984495] env[62476]: DEBUG nova.compute.manager [req-23954031-5926-4d60-8066-16603cb02990 req-35116759-70ff-4162-994e-973ac4805edf service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Received event network-vif-plugged-57cea09c-1bb1-4c67-9505-b243d84c343b {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1263.984807] env[62476]: DEBUG oslo_concurrency.lockutils [req-23954031-5926-4d60-8066-16603cb02990 req-35116759-70ff-4162-994e-973ac4805edf service nova] Acquiring lock "4954bf5d-20db-4787-91b5-a990ed30cdf3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.984931] env[62476]: DEBUG oslo_concurrency.lockutils [req-23954031-5926-4d60-8066-16603cb02990 req-35116759-70ff-4162-994e-973ac4805edf service nova] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.985121] env[62476]: DEBUG oslo_concurrency.lockutils [req-23954031-5926-4d60-8066-16603cb02990 req-35116759-70ff-4162-994e-973ac4805edf service nova] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.985294] env[62476]: DEBUG nova.compute.manager [req-23954031-5926-4d60-8066-16603cb02990 req-35116759-70ff-4162-994e-973ac4805edf service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] No
waiting events found dispatching network-vif-plugged-57cea09c-1bb1-4c67-9505-b243d84c343b {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1263.985455] env[62476]: WARNING nova.compute.manager [req-23954031-5926-4d60-8066-16603cb02990 req-35116759-70ff-4162-994e-973ac4805edf service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Received unexpected event network-vif-plugged-57cea09c-1bb1-4c67-9505-b243d84c343b for instance with vm_state building and task_state spawning. [ 1264.024026] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Successfully updated port: 57cea09c-1bb1-4c67-9505-b243d84c343b {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1264.041024] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "refresh_cache-4954bf5d-20db-4787-91b5-a990ed30cdf3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.041024] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "refresh_cache-4954bf5d-20db-4787-91b5-a990ed30cdf3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.042237] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1264.098841] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1264.316422] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Updating instance_info_cache with network_info: [{"id": "57cea09c-1bb1-4c67-9505-b243d84c343b", "address": "fa:16:3e:93:38:42", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57cea09c-1b", "ovs_interfaceid": "57cea09c-1bb1-4c67-9505-b243d84c343b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.333525] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "refresh_cache-4954bf5d-20db-4787-91b5-a990ed30cdf3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.333941] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Instance network_info: |[{"id": "57cea09c-1bb1-4c67-9505-b243d84c343b", "address": "fa:16:3e:93:38:42", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57cea09c-1b", "ovs_interfaceid": "57cea09c-1bb1-4c67-9505-b243d84c343b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1264.334814] env[62476]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:38:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57cea09c-1bb1-4c67-9505-b243d84c343b', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1264.343528] env[62476]: DEBUG oslo.service.loopingcall [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1264.344585] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1264.344854] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8be50616-6978-45d1-9594-e86658c632ad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.367995] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1264.367995] env[62476]: value = "task-4319108" [ 1264.367995] env[62476]: _type = "Task" [ 1264.367995] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.377390] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319108, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.879453] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319108, 'name': CreateVM_Task, 'duration_secs': 0.351174} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.879639] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1264.889401] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.889578] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.889912] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1264.890192] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9a6e263-1470-4bac-b5f5-b718f6187f74 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.895422] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1264.895422] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52a47591-4b1b-f892-046a-5826e4a4aec9" [ 1264.895422] env[62476]: _type = "Task" [ 1264.895422] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.904231] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52a47591-4b1b-f892-046a-5826e4a4aec9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.406837] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.407180] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1265.407311] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.049725] env[62476]: DEBUG nova.compute.manager [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Received event network-changed-57cea09c-1bb1-4c67-9505-b243d84c343b {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1266.049922] env[62476]: DEBUG nova.compute.manager [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Refreshing instance network info cache due to event network-changed-57cea09c-1bb1-4c67-9505-b243d84c343b. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1266.050228] env[62476]: DEBUG oslo_concurrency.lockutils [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] Acquiring lock "refresh_cache-4954bf5d-20db-4787-91b5-a990ed30cdf3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.050323] env[62476]: DEBUG oslo_concurrency.lockutils [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] Acquired lock "refresh_cache-4954bf5d-20db-4787-91b5-a990ed30cdf3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.050540] env[62476]: DEBUG nova.network.neutron [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Refreshing network info cache for port 57cea09c-1bb1-4c67-9505-b243d84c343b {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1266.400834] env[62476]: DEBUG nova.network.neutron [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Updated VIF entry in instance network info cache for port 57cea09c-1bb1-4c67-9505-b243d84c343b. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1266.401235] env[62476]: DEBUG nova.network.neutron [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Updating instance_info_cache with network_info: [{"id": "57cea09c-1bb1-4c67-9505-b243d84c343b", "address": "fa:16:3e:93:38:42", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57cea09c-1b", "ovs_interfaceid": "57cea09c-1bb1-4c67-9505-b243d84c343b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.412181] env[62476]: DEBUG oslo_concurrency.lockutils [req-26e8f1be-8ce2-45d0-9381-1baf5c429766 req-c75ad1a6-1410-474e-ad05-09dce15e5a56 service nova] Releasing lock "refresh_cache-4954bf5d-20db-4787-91b5-a990ed30cdf3" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1271.239085] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "4954bf5d-20db-4787-91b5-a990ed30cdf3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.333317] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "003e332b-9765-4db7-9f48-40d33c6532d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.333629] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "003e332b-9765-4db7-9f48-40d33c6532d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.578745] env[62476]: DEBUG oslo_concurrency.lockutils [None req-482d18b2-e12d-4680-b5e4-47b7b3d0eb36 tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "e95a41ff-af11-48ac-8245-c70eb0a73c7e" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.578994] env[62476]: DEBUG oslo_concurrency.lockutils [None req-482d18b2-e12d-4680-b5e4-47b7b3d0eb36 tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "e95a41ff-af11-48ac-8245-c70eb0a73c7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.027640] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.027964] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1292.028027] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1292.053474] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.053646] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.053783] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.053911] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.054187] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.054363] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.054493] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.054617] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.054768] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.054916] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1292.055058] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1292.055603] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.068386] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.068579] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.068747] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.068903] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1292.070173] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb4363f-3950-4514-9847-581ff17ffd6c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.079322] 
env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07de4834-56aa-4543-96f8-d79bbb2f4517 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.095169] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c636e9-2e86-4096-8a21-2d93fc54bd0a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.102163] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a8e707-5c1d-4c27-9d2f-10e05c417f8c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.131237] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180723MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1292.131409] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.131610] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.224486] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.224655] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.224789] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.224919] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.225067] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.225396] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.225396] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.225567] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.225567] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.225716] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1292.237039] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance afb77e4b-c7d1-4743-b9ca-1e729371a334 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.248102] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e23febc5-e647-4640-afbd-bb28c9483283 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.260573] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.272632] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6084c1eb-51da-46b8-b0f5-5d41c363e831 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.285939] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 555c7c87-4335-4cb7-9b0b-357c4a832143 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.298044] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e77488eb-aad9-491d-95d6-a9cc39ddc2f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.310200] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.323087] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.334760] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a9a06075-ff8e-401e-9b3a-055fb50c2e2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.346931] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.361423] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.376924] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5c5fe542-5362-4fe9-a359-ea3eac825ca0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.396369] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance b304faf2-127c-4185-89c4-84093c81cf6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.409697] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.422395] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e95a41ff-af11-48ac-8245-c70eb0a73c7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1292.423024] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1292.423024] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
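[editor's note] The "Final resource view" entry above appears to be internally consistent with this audit pass: ten actively managed instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, plus the 512 MB 'reserved' in the MEMORY_MB inventory reported just below. The fifteen instances that are only scheduled ("yet to start") are skipped and do not contribute. A back-of-envelope check in plain Python (a verification sketch, not Nova code):

    # Hypothetical check; every number comes from the log entries in this audit.
    allocations = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 10
    reserved_host_memory_mb = 512  # 'reserved' in the MEMORY_MB inventory below
    used_ram = reserved_host_memory_mb + sum(a['MEMORY_MB'] for a in allocations)
    used_disk = sum(a['DISK_GB'] for a in allocations)
    used_vcpus = sum(a['VCPU'] for a in allocations)
    # Matches used_ram=1792MB, used_disk=10GB, used_vcpus=10 reported above.
    assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)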
[ 1292.871136] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c974774b-4acc-460b-af44-d2e3fed7bbc5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.879766] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68617bf-d26d-425f-85c1-595120c88f35 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.909985] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93835b61-eb45-4168-9eb5-21798be7de53 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.917919] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cc3dac-e863-42ff-93a3-723a481c75b6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.932022] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.941584] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1292.957973] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1292.958197] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.827s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1294.930245] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.930754] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.027253] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.027715] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1297.027846] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.027608] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.027869] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.022469] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.023383] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.042761] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00a51f3a-7942-4732-b6bb-71bbe42cd8c2 tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Acquiring lock "6a5ba30d-8a44-49bb-b061-fadd99dc4d4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1310.042761] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00a51f3a-7942-4732-b6bb-71bbe42cd8c2 tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Lock "6a5ba30d-8a44-49bb-b061-fadd99dc4d4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1310.218090] env[62476]: 
WARNING oslo_vmware.rw_handles [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1310.218090] env[62476]: ERROR oslo_vmware.rw_handles [ 1310.218590] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1310.220447] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1310.220708] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Copying Virtual Disk [datastore1] vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/da29743b-02cb-4388-ab8e-5d13cf64fd03/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1310.220986] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a22238dd-34cd-43cd-a736-25d3a793487f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.230159] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Waiting for the task: (returnval){ [ 1310.230159] env[62476]: value = 
"task-4319109" [ 1310.230159] env[62476]: _type = "Task" [ 1310.230159] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.238520] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Task: {'id': task-4319109, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.740952] env[62476]: DEBUG oslo_vmware.exceptions [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1310.742021] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.742236] env[62476]: ERROR nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1310.742236] env[62476]: Faults: ['InvalidArgument'] [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Traceback (most recent call last): [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] yield resources [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self.driver.spawn(context, instance, image_meta, [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self._fetch_image_if_missing(context, vi) [ 1310.742236] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 
0524dc08-ac1a-4f56-b44a-adbb5a0b5038] image_cache(vi, tmp_image_ds_loc) [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] vm_util.copy_virtual_disk( [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] session._wait_for_task(vmdk_copy_task) [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] return self.wait_for_task(task_ref) [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] return evt.wait() [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] result = hub.switch() [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1310.742743] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] return self.greenlet.switch() [ 1310.743125] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1310.743125] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self.f(*self.args, **self.kw) [ 1310.743125] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1310.743125] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] raise exceptions.translate_fault(task_info.error) [ 1310.743125] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1310.743125] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Faults: ['InvalidArgument'] [ 1310.743125] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] [ 1310.743125] env[62476]: INFO nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Terminating instance [ 1310.744353] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 
tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.744594] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1310.745300] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1310.745531] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1310.745802] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b061d0f-52b3-4768-99ed-bef9b59185b1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.748458] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69edd9db-9d27-4878-934c-e1cbd25dc67f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.755013] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1310.755240] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1470391e-08db-4f2d-88a8-8d746c7eb993 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.757482] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1310.757657] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1310.758632] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de073888-3261-4c31-b1ec-b7500ad8a9a6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.763660] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1310.763660] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52240ef2-a6e6-34bd-f0c6-9256f841b775" [ 1310.763660] env[62476]: _type = "Task" [ 1310.763660] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.771531] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52240ef2-a6e6-34bd-f0c6-9256f841b775, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.836068] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1310.836307] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1310.836502] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Deleting the datastore file [datastore1] 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1310.836783] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c4bd1df-41ca-4431-a672-0624834346fc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.843889] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Waiting for the task: (returnval){ [ 1310.843889] env[62476]: value = "task-4319111" [ 1310.843889] env[62476]: _type = "Task" [ 1310.843889] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.851710] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Task: {'id': task-4319111, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.274941] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1311.275279] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.275463] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2890a70f-83eb-4363-a081-b4fe0f7ca4c6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.287091] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.287302] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Fetch image to [datastore1] vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1311.287480] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1311.288294] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77dd4daa-a410-48d5-a1ee-bad2d7216fa4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.295275] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e8b3b6-6622-4f56-9353-3f6eaa23b4da {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.304683] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e09dce6-d67c-4cb8-8331-ca27e65711df {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.337447] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7468b1a3-d424-4f12-910d-a796993408da {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.343827] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-57be9a17-f3e5-44ee-a21f-cac2a896a09e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.353581] env[62476]: DEBUG oslo_vmware.api [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Task: {'id': task-4319111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072716} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.353832] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1311.354083] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1311.354286] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1311.354468] env[62476]: INFO nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1311.356750] env[62476]: DEBUG nova.compute.claims [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1311.356945] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.357208] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.438035] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1311.500644] env[62476]: DEBUG oslo_vmware.rw_handles [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1311.559612] env[62476]: DEBUG oslo_vmware.rw_handles [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1311.559806] env[62476]: DEBUG oslo_vmware.rw_handles [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1311.769562] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a643bb-d28d-4b4f-9943-58bd70e4dcb1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.777294] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fa9518-e103-4921-8cfa-20745e9aeea3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.805914] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1916b25c-d3d0-4854-9212-6880167ac982 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.813110] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597eb83e-a97e-4602-94be-37541ecc929e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.826669] env[62476]: DEBUG nova.compute.provider_tree [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.836680] env[62476]: DEBUG nova.scheduler.client.report [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1311.870056] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.513s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.870581] env[62476]: ERROR nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1311.870581] env[62476]: Faults: ['InvalidArgument'] [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Traceback (most recent call last): [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self.driver.spawn(context, instance, image_meta, [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self._fetch_image_if_missing(context, vi) [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] image_cache(vi, tmp_image_ds_loc) [ 1311.870581] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] vm_util.copy_virtual_disk( [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] session._wait_for_task(vmdk_copy_task) [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] return self.wait_for_task(task_ref) [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] return evt.wait() [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] result = hub.switch() [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] return self.greenlet.switch() [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1311.871076] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] self.f(*self.args, **self.kw) [ 1311.871588] env[62476]: ERROR nova.compute.manager [instance: 
0524dc08-ac1a-4f56-b44a-adbb5a0b5038] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1311.871588] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] raise exceptions.translate_fault(task_info.error) [ 1311.871588] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1311.871588] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Faults: ['InvalidArgument'] [ 1311.871588] env[62476]: ERROR nova.compute.manager [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] [ 1311.871588] env[62476]: DEBUG nova.compute.utils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1311.873241] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Build of instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 was re-scheduled: A specified parameter was not correct: fileType [ 1311.873241] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1311.873692] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1311.873896] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1311.874107] env[62476]: DEBUG nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1311.874289] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1312.485587] env[62476]: DEBUG nova.network.neutron [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.500289] env[62476]: INFO nova.compute.manager [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Took 0.63 seconds to deallocate network for instance. [ 1312.605729] env[62476]: INFO nova.scheduler.client.report [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Deleted allocations for instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 [ 1312.631444] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ac7eb4d0-edc8-452a-bc5d-b9f828d56ff2 tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 657.536s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.632903] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 460.176s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.633743] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Acquiring lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.633743] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock 
"0524dc08-ac1a-4f56-b44a-adbb5a0b5038-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.633743] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.637831] env[62476]: INFO nova.compute.manager [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Terminating instance [ 1312.639929] env[62476]: DEBUG nova.compute.manager [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1312.641080] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1312.641080] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e4d7975-6729-4a7a-8d96-80e17e584373 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.651382] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fe616b-316b-4fb2-947d-35643c11b279 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.661812] env[62476]: DEBUG nova.compute.manager [None req-d141ef50-6c5c-4c75-a49f-f3f29f0300be tempest-ServerRescueTestJSONUnderV235-907344075 tempest-ServerRescueTestJSONUnderV235-907344075-project-member] [instance: 51f4fbdd-836c-4645-8e63-af9827234d7c] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1312.689041] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0524dc08-ac1a-4f56-b44a-adbb5a0b5038 could not be found. 
[ 1312.689041] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1312.689186] env[62476]: INFO nova.compute.manager [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1312.689461] env[62476]: DEBUG oslo.service.loopingcall [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1312.690055] env[62476]: DEBUG nova.compute.manager [None req-d141ef50-6c5c-4c75-a49f-f3f29f0300be tempest-ServerRescueTestJSONUnderV235-907344075 tempest-ServerRescueTestJSONUnderV235-907344075-project-member] [instance: 51f4fbdd-836c-4645-8e63-af9827234d7c] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1312.691511] env[62476]: DEBUG nova.compute.manager [-] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1312.691648] env[62476]: DEBUG nova.network.neutron [-] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1312.714858] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d141ef50-6c5c-4c75-a49f-f3f29f0300be tempest-ServerRescueTestJSONUnderV235-907344075 tempest-ServerRescueTestJSONUnderV235-907344075-project-member] Lock "51f4fbdd-836c-4645-8e63-af9827234d7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 235.492s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.725631] env[62476]: DEBUG nova.compute.manager [None req-28c0894c-b7dd-4d37-b3c4-e6bc32b4f71a tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] [instance: da13b71e-709e-4b89-82d7-d4f30c319f9b] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1312.728963] env[62476]: DEBUG nova.network.neutron [-] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.737378] env[62476]: INFO nova.compute.manager [-] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] Took 0.05 seconds to deallocate network for instance. [ 1312.759538] env[62476]: DEBUG nova.compute.manager [None req-28c0894c-b7dd-4d37-b3c4-e6bc32b4f71a tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] [instance: da13b71e-709e-4b89-82d7-d4f30c319f9b] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1312.783725] env[62476]: DEBUG oslo_concurrency.lockutils [None req-28c0894c-b7dd-4d37-b3c4-e6bc32b4f71a tempest-AttachVolumeTestJSON-2018915431 tempest-AttachVolumeTestJSON-2018915431-project-member] Lock "da13b71e-709e-4b89-82d7-d4f30c319f9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.783s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.797585] env[62476]: DEBUG nova.compute.manager [None req-18b8d733-109e-4c79-887f-58bce9943f88 tempest-ServerTagsTestJSON-1565539301 tempest-ServerTagsTestJSON-1565539301-project-member] [instance: afb77e4b-c7d1-4743-b9ca-1e729371a334] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1312.824802] env[62476]: DEBUG nova.compute.manager [None req-18b8d733-109e-4c79-887f-58bce9943f88 tempest-ServerTagsTestJSON-1565539301 tempest-ServerTagsTestJSON-1565539301-project-member] [instance: afb77e4b-c7d1-4743-b9ca-1e729371a334] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1312.859745] env[62476]: DEBUG oslo_concurrency.lockutils [None req-18b8d733-109e-4c79-887f-58bce9943f88 tempest-ServerTagsTestJSON-1565539301 tempest-ServerTagsTestJSON-1565539301-project-member] Lock "afb77e4b-c7d1-4743-b9ca-1e729371a334" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.765s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.863965] env[62476]: DEBUG oslo_concurrency.lockutils [None req-8ede5de0-0c5f-4c7d-91aa-b1169a84fd6f tempest-VolumesAssistedSnapshotsTest-1444066257 tempest-VolumesAssistedSnapshotsTest-1444066257-project-member] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.231s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.865599] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 106.431s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.865773] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0524dc08-ac1a-4f56-b44a-adbb5a0b5038] During sync_power_state the instance has a pending task (deleting). Skip. [ 1312.866108] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "0524dc08-ac1a-4f56-b44a-adbb5a0b5038" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.869318] env[62476]: DEBUG nova.compute.manager [None req-22b8c072-ab47-4bbc-bbde-9e1dc727e74c tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] [instance: e23febc5-e647-4640-afbd-bb28c9483283] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1312.893922] env[62476]: DEBUG nova.compute.manager [None req-22b8c072-ab47-4bbc-bbde-9e1dc727e74c tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] [instance: e23febc5-e647-4640-afbd-bb28c9483283] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1312.915099] env[62476]: DEBUG oslo_concurrency.lockutils [None req-22b8c072-ab47-4bbc-bbde-9e1dc727e74c tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Lock "e23febc5-e647-4640-afbd-bb28c9483283" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.219s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.924296] env[62476]: DEBUG nova.compute.manager [None req-a9efd60d-70ed-4621-bbff-3e849241392b tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: a5ad39b8-0d35-4a31-9279-bcce71363d95] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1312.949112] env[62476]: DEBUG nova.compute.manager [None req-a9efd60d-70ed-4621-bbff-3e849241392b tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: a5ad39b8-0d35-4a31-9279-bcce71363d95] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1312.971173] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a9efd60d-70ed-4621-bbff-3e849241392b tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "a5ad39b8-0d35-4a31-9279-bcce71363d95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 193.643s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.980779] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1313.031721] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1313.031982] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1313.033520] env[62476]: INFO nova.compute.claims [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1313.396206] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26971eb-7c02-4af1-8b38-643f6f059e54 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.405462] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaea1a9b-e0ff-4ceb-ad9c-5185649176d0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.436973] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9d3279-1cf0-4a58-9e78-4c0ed0fa1c3d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.445312] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fc7a5f-ce2b-46e5-9905-4cc3ed8d2939 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.459955] env[62476]: DEBUG nova.compute.provider_tree [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.470093] env[62476]: DEBUG nova.scheduler.client.report [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1313.484855] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.453s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.485483] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1313.524029] env[62476]: DEBUG nova.compute.utils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1313.525555] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1313.525555] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1313.533930] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1313.601982] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1313.605496] env[62476]: DEBUG nova.policy [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a117f106402424280e477babc21990c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f16c7f1cb3ec41ffbdd622e3ee5992ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1313.631253] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=<?>,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-07-18T15:29:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1313.631504] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1313.631663] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1313.631848] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1313.631995] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1313.632158] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1313.632367] env[62476]: DEBUG nova.virt.hardware [None 
req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1313.632527] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1313.632695] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1313.632857] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1313.633047] env[62476]: DEBUG nova.virt.hardware [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1313.633921] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698d3ce5-7ea4-4024-a91d-251f8e4a567b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.642502] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0659fdb7-9ba6-471b-b32a-552594c22aeb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.994314] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Successfully created port: 5438eee6-8192-4f40-8bb6-3b84349f1aaf {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1314.642779] env[62476]: DEBUG nova.compute.manager [req-b8033b09-22c0-45cb-b676-adacb7b4ce23 req-41ae445f-c966-414c-8e8e-ec6405b354a3 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Received event network-vif-plugged-5438eee6-8192-4f40-8bb6-3b84349f1aaf {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1314.643047] env[62476]: DEBUG oslo_concurrency.lockutils [req-b8033b09-22c0-45cb-b676-adacb7b4ce23 req-41ae445f-c966-414c-8e8e-ec6405b354a3 service nova] Acquiring lock "7211a8c4-5430-4b0c-86e7-8101ed71463e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.643236] env[62476]: DEBUG oslo_concurrency.lockutils [req-b8033b09-22c0-45cb-b676-adacb7b4ce23 req-41ae445f-c966-414c-8e8e-ec6405b354a3 service nova] Lock 
"7211a8c4-5430-4b0c-86e7-8101ed71463e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.643408] env[62476]: DEBUG oslo_concurrency.lockutils [req-b8033b09-22c0-45cb-b676-adacb7b4ce23 req-41ae445f-c966-414c-8e8e-ec6405b354a3 service nova] Lock "7211a8c4-5430-4b0c-86e7-8101ed71463e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.647020] env[62476]: DEBUG nova.compute.manager [req-b8033b09-22c0-45cb-b676-adacb7b4ce23 req-41ae445f-c966-414c-8e8e-ec6405b354a3 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] No waiting events found dispatching network-vif-plugged-5438eee6-8192-4f40-8bb6-3b84349f1aaf {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1314.647020] env[62476]: WARNING nova.compute.manager [req-b8033b09-22c0-45cb-b676-adacb7b4ce23 req-41ae445f-c966-414c-8e8e-ec6405b354a3 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Received unexpected event network-vif-plugged-5438eee6-8192-4f40-8bb6-3b84349f1aaf for instance with vm_state building and task_state spawning. [ 1314.758866] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Successfully updated port: 5438eee6-8192-4f40-8bb6-3b84349f1aaf {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.776063] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "refresh_cache-7211a8c4-5430-4b0c-86e7-8101ed71463e" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.776063] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "refresh_cache-7211a8c4-5430-4b0c-86e7-8101ed71463e" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.776063] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1314.829172] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1315.109478] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Updating instance_info_cache with network_info: [{"id": "5438eee6-8192-4f40-8bb6-3b84349f1aaf", "address": "fa:16:3e:37:76:6d", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5438eee6-81", "ovs_interfaceid": "5438eee6-8192-4f40-8bb6-3b84349f1aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.130243] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "refresh_cache-7211a8c4-5430-4b0c-86e7-8101ed71463e" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.130615] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Instance network_info: |[{"id": "5438eee6-8192-4f40-8bb6-3b84349f1aaf", "address": "fa:16:3e:37:76:6d", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5438eee6-81", "ovs_interfaceid": "5438eee6-8192-4f40-8bb6-3b84349f1aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1315.131113] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:76:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3734b156-0f7d-4721-b23c-d000412ec2eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5438eee6-8192-4f40-8bb6-3b84349f1aaf', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1315.139840] env[62476]: DEBUG oslo.service.loopingcall [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1315.140329] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1315.140576] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb02e7df-e57a-41b2-a7ae-a53eb976db9b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.163040] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1315.163040] env[62476]: value = "task-4319112" [ 1315.163040] env[62476]: _type = "Task" [ 1315.163040] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.172722] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319112, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.675204] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319112, 'name': CreateVM_Task, 'duration_secs': 0.371437} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.675204] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1315.675858] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.676036] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.676362] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1315.676618] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd75d1b1-948c-4e3c-99f5-32d390c2af4f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.681543] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 1315.681543] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52d77181-1ec4-ec5c-bf36-e3baafc49070" [ 1315.681543] env[62476]: _type = "Task" [ 1315.681543] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.689422] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52d77181-1ec4-ec5c-bf36-e3baafc49070, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.193057] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.193388] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1316.193545] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.737232] env[62476]: DEBUG nova.compute.manager [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Received event network-changed-5438eee6-8192-4f40-8bb6-3b84349f1aaf {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1316.737232] env[62476]: DEBUG nova.compute.manager [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Refreshing instance network info cache due to event network-changed-5438eee6-8192-4f40-8bb6-3b84349f1aaf. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1316.737232] env[62476]: DEBUG oslo_concurrency.lockutils [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] Acquiring lock "refresh_cache-7211a8c4-5430-4b0c-86e7-8101ed71463e" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1316.737232] env[62476]: DEBUG oslo_concurrency.lockutils [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] Acquired lock "refresh_cache-7211a8c4-5430-4b0c-86e7-8101ed71463e" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.737232] env[62476]: DEBUG nova.network.neutron [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Refreshing network info cache for port 5438eee6-8192-4f40-8bb6-3b84349f1aaf {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1317.068877] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "7211a8c4-5430-4b0c-86e7-8101ed71463e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.081134] env[62476]: DEBUG nova.network.neutron [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Updated VIF entry in instance network info cache for port 5438eee6-8192-4f40-8bb6-3b84349f1aaf. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1317.081504] env[62476]: DEBUG nova.network.neutron [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Updating instance_info_cache with network_info: [{"id": "5438eee6-8192-4f40-8bb6-3b84349f1aaf", "address": "fa:16:3e:37:76:6d", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5438eee6-81", "ovs_interfaceid": "5438eee6-8192-4f40-8bb6-3b84349f1aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.091846] env[62476]: DEBUG oslo_concurrency.lockutils [req-02905fd9-48b4-4e7c-8c83-62c07b8a9056 req-e1f6aa5a-3d63-467b-a23f-81d0648ea211 service nova] Releasing lock "refresh_cache-7211a8c4-5430-4b0c-86e7-8101ed71463e" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.779354] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "27737774-efb5-4aee-a0c0-695e78a15dd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.779981] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.131387] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d92df672-b8eb-4c46-83ec-f3bd473e0f9a tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] Acquiring lock "f082523d-622b-4d64-b15f-a8511261f4b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.131755] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d92df672-b8eb-4c46-83ec-f3bd473e0f9a tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] Lock 
"f082523d-622b-4d64-b15f-a8511261f4b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.027252] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.039822] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.040070] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.040246] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.040401] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1353.041562] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bb078b-2f3f-4e2a-9b42-df5a077504f8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.051982] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e4a574-c3e0-4cee-a1b8-7bfe212a53c1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.066068] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea6139a-f6dd-467a-bb22-1369449c31f3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.072546] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995ad9da-2656-4d8a-bf4e-ae102856810c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.100527] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180725MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1353.100656] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.100816] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.179295] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.179464] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1e005b4d-7f94-4263-ba5d-303af209c408 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.179738] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f4e97733-101b-46dd-aec4-a3287b120eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.179738] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.179844] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.179944] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.180143] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.180240] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.180336] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.180467] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1353.197161] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.209776] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.221692] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a9a06075-ff8e-401e-9b3a-055fb50c2e2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.233014] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.244503] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.260370] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5c5fe542-5362-4fe9-a359-ea3eac825ca0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.270992] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance b304faf2-127c-4185-89c4-84093c81cf6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.284030] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.294539] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e95a41ff-af11-48ac-8245-c70eb0a73c7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.305104] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6a5ba30d-8a44-49bb-b061-fadd99dc4d4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.315955] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.327721] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f082523d-622b-4d64-b15f-a8511261f4b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1353.327952] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1353.328110] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1353.590692] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17d854a-caba-44bd-b9c4-6ea1013dba80 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.598872] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4167a4-98e2-400f-bbb1-dffe4618e4b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.627932] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08c8c23-edd3-40aa-9ce8-bb3d7c1b69b1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.635163] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06053bf2-1e83-4f69-8612-cb5e9b16caeb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.648172] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1353.658567] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1353.673942] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1353.674143] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.573s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1354.674747] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.675077] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1354.675077] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1354.697115] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.697306] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.697443] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.697642] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.697781] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.697906] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.698041] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.698168] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.698288] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.698418] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1354.698615] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1355.027341] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.027009] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.027316] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.027470] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1359.027703] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.028038] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.027077] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.344290] env[62476]: WARNING oslo_vmware.rw_handles [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1360.344290] env[62476]: ERROR oslo_vmware.rw_handles [ 1360.344917] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1360.346691] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1360.346960] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Copying Virtual Disk 
[datastore1] vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/4035e337-0d01-4ac6-bbf9-acaff478f336/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1360.347298] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-619d1a06-de08-4252-96d6-27cf06687b12 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.357497] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1360.357497] env[62476]: value = "task-4319113" [ 1360.357497] env[62476]: _type = "Task" [ 1360.357497] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.365876] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319113, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.868071] env[62476]: DEBUG oslo_vmware.exceptions [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1360.868394] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.869138] env[62476]: ERROR nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1360.869138] env[62476]: Faults: ['InvalidArgument'] [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Traceback (most recent call last): [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] yield resources [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self.driver.spawn(context, instance, image_meta, [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 
1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self._fetch_image_if_missing(context, vi) [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1360.869138] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] image_cache(vi, tmp_image_ds_loc) [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] vm_util.copy_virtual_disk( [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] session._wait_for_task(vmdk_copy_task) [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] return self.wait_for_task(task_ref) [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] return evt.wait() [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] result = hub.switch() [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] return self.greenlet.switch() [ 1360.869591] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1360.869892] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self.f(*self.args, **self.kw) [ 1360.869892] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1360.869892] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] raise exceptions.translate_fault(task_info.error) [ 1360.869892] env[62476]: ERROR nova.compute.manager [instance: 
0561164b-f3f9-446f-b597-4b6d16a32a00] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1360.869892] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Faults: ['InvalidArgument'] [ 1360.869892] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] [ 1360.869892] env[62476]: INFO nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Terminating instance [ 1360.871176] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1360.871386] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1360.871646] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01b9521b-5274-44df-a6a7-df890506952e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.874436] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1360.874631] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1360.875512] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f13cfe8-29bf-4268-b034-7e094637d16d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.882972] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1360.883249] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53101f13-4f5e-4549-b912-c47558618509 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.885553] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1360.885727] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1360.886703] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af637a17-b0f8-47e3-aa0d-fd069e646dbd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.891593] env[62476]: DEBUG oslo_vmware.api [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for the task: (returnval){ [ 1360.891593] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52b9ac1e-4b75-3b38-6cba-fa0f84cffaf4" [ 1360.891593] env[62476]: _type = "Task" [ 1360.891593] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.898668] env[62476]: DEBUG oslo_vmware.api [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52b9ac1e-4b75-3b38-6cba-fa0f84cffaf4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.965958] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1360.966218] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1360.966407] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleting the datastore file [datastore1] 0561164b-f3f9-446f-b597-4b6d16a32a00 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.966774] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ab9a033-d691-425e-86da-c9c95b5bfb20 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.973089] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1360.973089] env[62476]: value = "task-4319115" [ 1360.973089] env[62476]: _type = "Task" [ 1360.973089] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.981104] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319115, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.402778] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1361.403146] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Creating directory with path [datastore1] vmware_temp/3ed0d0ff-95fe-4df2-aea5-a6c33b0390f5/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1361.403301] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef71d1ca-5e51-44d8-a466-4578223e4e88 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.415768] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Created directory with path [datastore1] vmware_temp/3ed0d0ff-95fe-4df2-aea5-a6c33b0390f5/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1361.416184] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Fetch image to [datastore1] vmware_temp/3ed0d0ff-95fe-4df2-aea5-a6c33b0390f5/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1361.416484] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/3ed0d0ff-95fe-4df2-aea5-a6c33b0390f5/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1361.417326] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcee07ce-ed12-4e34-b79a-b525697c87cc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.424680] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425723ad-854f-4413-91c9-0e856dac44c3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.434122] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e48f95-de54-442a-899d-d347521a618f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.464921] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-40f050bb-c51b-4abf-9477-f34a9e344cd2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.471086] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-516bc154-c78b-49ee-bb0a-2c8ee1aec195 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.481546] env[62476]: DEBUG oslo_vmware.api [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069375} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.481791] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.481975] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1361.482158] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1361.482341] env[62476]: INFO nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Took 0.61 seconds to destroy the instance on the hypervisor. 
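[editor's note] The DeleteDatastoreFile_Task entries above ("progress is 0%." followed by "completed successfully") show the pattern oslo.vmware applies to every vCenter operation in this log: the driver invokes a method that returns a Task managed object, then a looping call polls the task state until it reports success or the recorded fault is translated and raised (which is exactly how the CopyVirtualDisk_Task failure earlier surfaced as "VimFaultException: A specified parameter was not correct: fileType"). The following is a minimal sketch of that polling loop, not the real oslo.vmware implementation; get_task_info is a hypothetical stand-in for the PropertyCollector.RetrievePropertiesEx round-trips seen in the log, and TaskFailed stands in for the translated VimFaultException.

    import time

    class TaskFailed(Exception):
        """Stand-in for the translated fault raised on task error."""

    def wait_for_task(task, get_task_info, poll_interval=0.5):
        # get_task_info(task) is assumed to return an object with
        # .state ('queued'/'running'/'success'/'error'), .progress,
        # and .error, mirroring the vSphere TaskInfo shape.
        while True:
            info = get_task_info(task)
            if info.state == 'success':
                return info
            if info.state == 'error':
                # The real code translates info.error into a specific
                # exception class here (e.g. for the 'InvalidArgument'
                # fault above); this sketch re-raises generically.
                raise TaskFailed(getattr(info.error, 'localizedMessage',
                                         info.error))
            # Still queued/running: report progress and poll again,
            # matching the repeated "progress is 0%." lines above.
            time.sleep(poll_interval)

In the actual driver the poll runs on a fixed-interval looping call inside oslo_vmware.api (the loopingcall.py and _poll_task frames visible in the tracebacks above), and the raised fault is what propagates up through vm_util.copy_virtual_disk into the "Instance failed to spawn" errors recorded in this log.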
[ 1361.484638] env[62476]: DEBUG nova.compute.claims [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1361.484788] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.485016] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.495894] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1361.678101] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.679679] env[62476]: ERROR nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. 
[ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last): [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = getattr(controller, method)(*args, **kwargs) [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._get(image_id) [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1361.679679] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] resp, body = self.http_client.get(url, headers=header) [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.request(url, 'GET', **kwargs) [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._handle_response(resp) [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exc.from_response(resp, resp.content) [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During handling of the above exception, another exception occurred: [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] [ 1361.680020] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last): [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] yield resources [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self.driver.spawn(context, instance, image_meta, [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._fetch_image_if_missing(context, vi) [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image_fetch(context, vi, tmp_image_ds_loc) [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] images.fetch_image( [ 1361.680340] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] metadata = IMAGE_API.get(context, image_ref) [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return session.show(context, image_id, [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] _reraise_translated_image_exception(image_id) [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise new_exc.with_traceback(exc_trace) [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = getattr(controller, method)(*args, **kwargs) [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1361.680701] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._get(image_id) [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] resp, body = self.http_client.get(url, headers=header) [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.request(url, 'GET', **kwargs) [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._handle_response(resp) [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exc.from_response(resp, resp.content) [ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. 
[ 1361.681035] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1361.681342] env[62476]: INFO nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Terminating instance
[ 1361.681644] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1361.681854] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1361.682509] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1361.682696] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1361.682937] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c52871e-22a3-4cf8-8c7c-e1d916d3d396 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.685955] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0b7a79-9028-44fd-81b8-d9af3b53777d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.696111] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1361.696410] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e146db3-b27b-461c-9d52-676c9afe3f2a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.699075] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1361.699447] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1361.700380] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c7ce7ce-34b8-47f2-8e17-5fbfa859bb91 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.708565] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Waiting for the task: (returnval){
[ 1361.708565] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]526af031-9658-3bbc-7a45-464eebe7b56d"
[ 1361.708565] env[62476]: _type = "Task"
[ 1361.708565] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1361.716720] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]526af031-9658-3bbc-7a45-464eebe7b56d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1361.769682] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1361.769955] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1361.770177] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Deleting the datastore file [datastore1] 1e005b4d-7f94-4263-ba5d-303af209c408 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1361.773107] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05290a57-5c00-462f-8f38-9e7e9bcc2cec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.781626] env[62476]: DEBUG oslo_vmware.api [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for the task: (returnval){
[ 1361.781626] env[62476]: value = "task-4319117"
[ 1361.781626] env[62476]: _type = "Task"
[ 1361.781626] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1361.794346] env[62476]: DEBUG oslo_vmware.api [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': task-4319117, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1361.835627] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70caba9-4ce0-4b61-8c8e-a4a68b164cf0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.843184] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a011f53-749d-435a-836b-cd8b3b67ac3d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.874194] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162917cc-4db2-4977-8827-c6ce41e8d472 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.882643] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8277a234-6208-4988-8f9f-ca0aa532116a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1361.898209] env[62476]: DEBUG nova.compute.provider_tree [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1361.906924] env[62476]: DEBUG nova.scheduler.client.report [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1361.923517] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.438s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1361.924362] env[62476]: ERROR nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1361.924362] env[62476]: Faults: ['InvalidArgument']
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Traceback (most recent call last):
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self.driver.spawn(context, instance, image_meta,
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self._fetch_image_if_missing(context, vi)
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] image_cache(vi, tmp_image_ds_loc)
[ 1361.924362] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] vm_util.copy_virtual_disk(
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] session._wait_for_task(vmdk_copy_task)
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] return self.wait_for_task(task_ref)
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] return evt.wait()
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] result = hub.switch()
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] return self.greenlet.switch()
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1361.924711] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] self.f(*self.args, **self.kw)
[ 1361.925014] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1361.925014] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] raise exceptions.translate_fault(task_info.error)
[ 1361.925014] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1361.925014] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Faults: ['InvalidArgument']
[ 1361.925014] env[62476]: ERROR nova.compute.manager [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00]
[ 1361.925290] env[62476]: DEBUG nova.compute.utils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1361.927243] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Build of instance 0561164b-f3f9-446f-b597-4b6d16a32a00 was re-scheduled: A specified parameter was not correct: fileType
[ 1361.927243] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1361.927671] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1361.927863] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1361.928047] env[62476]: DEBUG nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1361.928219] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1362.221032] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1362.221032] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Creating directory with path [datastore1] vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1362.221235] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f6c21a0-68b3-4cc5-8572-fde89d15c056 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.235610] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Created directory with path [datastore1] vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1362.235610] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Fetch image to [datastore1] vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1362.235610] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1362.236222] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54309ade-c123-4c16-90e1-1620554033df {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.245506] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e874b7e9-e02d-4d67-801f-c8e82cec16c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.248285] env[62476]: DEBUG nova.network.neutron [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1362.257830] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a46416-87aa-415c-b9bf-c3cd09b5c366 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.262725] env[62476]: INFO nova.compute.manager [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Took 0.33 seconds to deallocate network for instance.
[ 1362.298051] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e726a8b9-f7ce-49ac-8d36-f1a4b119b047 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.309559] env[62476]: DEBUG oslo_vmware.api [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Task: {'id': task-4319117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079239} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1362.310520] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1362.310520] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1362.310677] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1362.310829] env[62476]: INFO nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Took 0.63 seconds to destroy the instance on the hypervisor.
[ 1362.312307] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ed85c80f-3b3f-4749-a8dc-d7a211d50cb7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.316013] env[62476]: DEBUG nova.compute.claims [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1362.316013] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1362.316145] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1362.335845] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1362.392319] env[62476]: INFO nova.scheduler.client.report [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleted allocations for instance 0561164b-f3f9-446f-b597-4b6d16a32a00
[ 1362.419234] env[62476]: DEBUG oslo_vmware.rw_handles [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1362.477161] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1658820d-4069-4ee3-a7d8-5b5c7e9b95d4 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 685.095s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.482391] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 485.948s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1362.482838] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "0561164b-f3f9-446f-b597-4b6d16a32a00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1362.482911] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1362.483060] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.485080] env[62476]: DEBUG oslo_vmware.rw_handles [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1362.485224] env[62476]: DEBUG oslo_vmware.rw_handles [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1362.485860] env[62476]: INFO nova.compute.manager [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Terminating instance
[ 1362.488836] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1362.489323] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1362.489323] env[62476]: DEBUG nova.network.neutron [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1362.496322] env[62476]: DEBUG nova.compute.manager [None req-1c604d5c-54e0-4dec-9fef-683247e7438c tempest-ServersNegativeTestMultiTenantJSON-1352401850 tempest-ServersNegativeTestMultiTenantJSON-1352401850-project-member] [instance: 6084c1eb-51da-46b8-b0f5-5d41c363e831] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1362.528042] env[62476]: DEBUG nova.network.neutron [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1362.535323] env[62476]: DEBUG nova.compute.manager [None req-1c604d5c-54e0-4dec-9fef-683247e7438c tempest-ServersNegativeTestMultiTenantJSON-1352401850 tempest-ServersNegativeTestMultiTenantJSON-1352401850-project-member] [instance: 6084c1eb-51da-46b8-b0f5-5d41c363e831] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1362.560369] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1c604d5c-54e0-4dec-9fef-683247e7438c tempest-ServersNegativeTestMultiTenantJSON-1352401850 tempest-ServersNegativeTestMultiTenantJSON-1352401850-project-member] Lock "6084c1eb-51da-46b8-b0f5-5d41c363e831" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 237.576s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.572200] env[62476]: DEBUG nova.compute.manager [None req-03241216-c91e-45e8-bb0c-ad6e211d8d39 tempest-ServersV294TestFqdnHostnames-1752262884 tempest-ServersV294TestFqdnHostnames-1752262884-project-member] [instance: 555c7c87-4335-4cb7-9b0b-357c4a832143] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1362.599041] env[62476]: DEBUG nova.compute.manager [None req-03241216-c91e-45e8-bb0c-ad6e211d8d39 tempest-ServersV294TestFqdnHostnames-1752262884 tempest-ServersV294TestFqdnHostnames-1752262884-project-member] [instance: 555c7c87-4335-4cb7-9b0b-357c4a832143] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1362.620011] env[62476]: DEBUG oslo_concurrency.lockutils [None req-03241216-c91e-45e8-bb0c-ad6e211d8d39 tempest-ServersV294TestFqdnHostnames-1752262884 tempest-ServersV294TestFqdnHostnames-1752262884-project-member] Lock "555c7c87-4335-4cb7-9b0b-357c4a832143" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.466s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.632927] env[62476]: DEBUG nova.compute.manager [None req-c777aede-2539-4fc7-b12f-d7fa4530bf87 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: e77488eb-aad9-491d-95d6-a9cc39ddc2f9] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1362.660019] env[62476]: DEBUG nova.compute.manager [None req-c777aede-2539-4fc7-b12f-d7fa4530bf87 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: e77488eb-aad9-491d-95d6-a9cc39ddc2f9] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1362.681606] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c777aede-2539-4fc7-b12f-d7fa4530bf87 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "e77488eb-aad9-491d-95d6-a9cc39ddc2f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 225.498s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.684463] env[62476]: DEBUG nova.network.neutron [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1362.693563] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1362.696779] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "refresh_cache-0561164b-f3f9-446f-b597-4b6d16a32a00" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1362.697363] env[62476]: DEBUG nova.compute.manager [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1362.697646] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1362.698346] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c05feee5-c778-4a37-86fe-92785f382d6c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.708722] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59210e68-848d-4413-b099-efe47c8437a7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.741221] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0561164b-f3f9-446f-b597-4b6d16a32a00 could not be found.
[ 1362.741221] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1362.741221] env[62476]: INFO nova.compute.manager [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1362.741432] env[62476]: DEBUG oslo.service.loopingcall [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1362.744669] env[62476]: DEBUG nova.compute.manager [-] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1362.745081] env[62476]: DEBUG nova.network.neutron [-] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1362.747931] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbd8aac-9e9d-49c1-8f3f-44f7e1b040b2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.759417] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e641849-d37a-49ad-8c11-744548bff4a6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.765615] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1362.792315] env[62476]: DEBUG nova.network.neutron [-] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1362.794123] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefc4948-9f8c-4bc3-a8d5-88640aaf3661 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.802931] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df7a649-1a1e-4596-8f40-feda4f99421a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1362.807246] env[62476]: DEBUG nova.network.neutron [-] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1362.818721] env[62476]: DEBUG nova.compute.provider_tree [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1362.820704] env[62476]: INFO nova.compute.manager [-] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] Took 0.08 seconds to deallocate network for instance.
[ 1362.829626] env[62476]: DEBUG nova.scheduler.client.report [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1362.845650] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.527s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.845650] env[62476]: ERROR nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.
[ 1362.845650] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last):
[ 1362.845650] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 1362.845650] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 1362.845650] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 1362.845650] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = getattr(controller, method)(*args, **kwargs)
[ 1362.845650] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 1362.845650] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._get(image_id)
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return RequestIdProxy(wrapped(*args, **kwargs))
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] resp, body = self.http_client.get(url, headers=header)
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.request(url, 'GET', **kwargs)
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._handle_response(resp)
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exc.from_response(resp, resp.content)
[ 1362.846092] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required.
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During handling of the above exception, another exception occurred:
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last):
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self.driver.spawn(context, instance, image_meta,
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._fetch_image_if_missing(context, vi)
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image_fetch(context, vi, tmp_image_ds_loc)
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file
[ 1362.846329] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] images.fetch_image(
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] metadata = IMAGE_API.get(context, image_ref)
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return session.show(context, image_id,
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 287, in show
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] _reraise_translated_image_exception(image_id)
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise new_exc.with_traceback(exc_trace)
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = getattr(controller, method)(*args, **kwargs)
[ 1362.846604] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._get(image_id)
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return RequestIdProxy(wrapped(*args, **kwargs))
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] resp, body = self.http_client.get(url, headers=header)
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.request(url, 'GET', **kwargs)
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._handle_response(resp)
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exc.from_response(resp, resp.content)
[ 1362.846865] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.
[ 1362.847115] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.847115] env[62476]: DEBUG nova.compute.utils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1362.847115] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.081s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1362.849491] env[62476]: INFO nova.compute.claims [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1362.851349] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Build of instance 1e005b4d-7f94-4263-ba5d-303af209c408 was re-scheduled: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1362.853886] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1362.853886] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1362.853886] env[62476]: DEBUG nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1362.853886] env[62476]: DEBUG nova.network.neutron [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1362.940856] env[62476]: DEBUG oslo_concurrency.lockutils [None req-764ba870-c2e0-4aa2-a812-30a57f8d5e8d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.458s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.941846] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 156.507s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1362.942052] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 0561164b-f3f9-446f-b597-4b6d16a32a00] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1362.942232] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "0561164b-f3f9-446f-b597-4b6d16a32a00" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1362.980551] env[62476]: DEBUG neutronclient.v2_0.client [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62476) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}}
[ 1362.981929] env[62476]: ERROR nova.compute.manager [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized.
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last):
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = getattr(controller, method)(*args, **kwargs)
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._get(image_id)
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return RequestIdProxy(wrapped(*args, **kwargs))
[ 1362.981929] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] resp, body = self.http_client.get(url, headers=header)
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.request(url, 'GET', **kwargs)
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._handle_response(resp)
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exc.from_response(resp, resp.content)
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required.
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During handling of the above exception, another exception occurred:
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.982209] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last):
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self.driver.spawn(context, instance, image_meta,
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._fetch_image_if_missing(context, vi)
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image_fetch(context, vi, tmp_image_ds_loc)
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] images.fetch_image(
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] metadata = IMAGE_API.get(context, image_ref)
[ 1362.982485] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return session.show(context, image_id,
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 287, in show
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] _reraise_translated_image_exception(image_id)
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise new_exc.with_traceback(exc_trace)
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = getattr(controller, method)(*args, **kwargs)
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._get(image_id)
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 1362.982767] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return RequestIdProxy(wrapped(*args, **kwargs))
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] resp, body = self.http_client.get(url, headers=header)
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.request(url, 'GET', **kwargs)
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self._handle_response(resp)
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exc.from_response(resp, resp.content)
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During handling of the above exception, another exception occurred:
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.983100] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last):
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._build_and_run_instance(context, instance, image,
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exception.RescheduledException(
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] nova.exception.RescheduledException: Build of instance 1e005b4d-7f94-4263-ba5d-303af209c408 was re-scheduled: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During handling of the above exception, another exception occurred:
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408]
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last):
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs)
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1362.983354] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] exception_handler_v20(status_code, error_body)
[ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise client_exc(message=error_message,
[ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Neutron server returns request_ids: ['req-c707c35c-820e-4180-adb5-741b82c09525']
[ 1362.983647] env[62476]: ERROR nova.compute.manager [instance:
1e005b4d-7f94-4263-ba5d-303af209c408] [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During handling of the above exception, another exception occurred: [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last): [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._deallocate_network(context, instance, requested_networks) [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self.network_api.deallocate_for_instance( [ 1362.983647] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] data = neutron.list_ports(**search_opts) [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.list('ports', self.ports_path, retrieve_all, [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] for r in self._pagination(collection, path, **params): [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] res = self.get(path, params=params) [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.983923] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 
1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.retry_request("GET", action, body=body, [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.do_request(method, action, body=body, [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._handle_fault_response(status_code, replybody, resp) [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exception.Unauthorized() [ 1362.984280] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] nova.exception.Unauthorized: Not authorized. 
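What the chained traceback above records: the Glance client got an HTTP 401 back from the image API (the Keystone token presented by the compute service was rejected), nova/image/glance.py translated that into ImageNotAuthorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7, _build_and_run_instance therefore raised RescheduledException, and the cleanup path then hit the same 401 from Neutron while listing ports. A minimal sketch of the translation step visible at glance.py:285-287 and :1032 in the traceback, using stand-in exception classes rather than the verbatim Nova source:

    import sys

    class HTTPUnauthorized(Exception):      # stand-in for glanceclient.exc.HTTPUnauthorized
        pass

    class ImageNotAuthorized(Exception):    # stand-in for nova.exception.ImageNotAuthorized
        pass

    def _reraise_translated_image_exception(image_id):
        # Pattern at nova/image/glance.py:1032 above: re-raise a Nova
        # exception while preserving the original client call's traceback.
        _exc_type, _exc_value, exc_trace = sys.exc_info()
        new_exc = ImageNotAuthorized("Not authorized for image %s." % image_id)
        raise new_exc.with_traceback(exc_trace)

    def show(image_id):
        try:
            # glanceclient's _handle_response() raised this for the 401 above
            raise HTTPUnauthorized("HTTP 401 Unauthorized")
        except HTTPUnauthorized:
            _reraise_translated_image_exception(image_id)

This is why the log shows both exceptions chained: the HTTPUnauthorized traceback is kept, but callers only ever see the Nova-level ImageNotAuthorized.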
[ 1362.984544] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] [ 1363.022109] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.045101] env[62476]: INFO nova.scheduler.client.report [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Deleted allocations for instance 1e005b4d-7f94-4263-ba5d-303af209c408 [ 1363.070462] env[62476]: DEBUG oslo_concurrency.lockutils [None req-144b0c65-be3b-4045-998c-84da12d72736 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "1e005b4d-7f94-4263-ba5d-303af209c408" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 637.982s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.071711] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "1e005b4d-7f94-4263-ba5d-303af209c408" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 440.593s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.071889] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "1e005b4d-7f94-4263-ba5d-303af209c408-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.072072] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "1e005b4d-7f94-4263-ba5d-303af209c408-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.072262] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "1e005b4d-7f94-4263-ba5d-303af209c408-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.074684] env[62476]: INFO nova.compute.manager [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Terminating instance [ 1363.079296] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 
tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquiring lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.079618] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Acquired lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.079688] env[62476]: DEBUG nova.network.neutron [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1363.084412] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1363.137522] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.202645] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2349b2c-5970-4b44-9784-81a2a2988ae8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.210769] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfb757a-42bb-408c-95d9-4c4643a0e20f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.244790] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b10d3b-111f-4fb8-86d0-1208322bef6f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.255901] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574fa126-fc6c-4ee8-b607-7014909c7093 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.269691] env[62476]: DEBUG nova.compute.provider_tree [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.278836] env[62476]: DEBUG nova.scheduler.client.report [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed for provider 
0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1363.292699] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.446s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.293508] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1363.296365] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.159s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.297834] env[62476]: INFO nova.compute.claims [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1363.334084] env[62476]: DEBUG nova.compute.utils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1363.335888] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1363.336076] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1363.344124] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1363.407363] env[62476]: DEBUG nova.policy [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3679a49f26014c3a87cbd26667ebb15a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f11b74a785074d08965c52999ceda5c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1363.451465] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1363.476537] env[62476]: DEBUG nova.network.neutron [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Updating instance_info_cache with network_info: [{"id": "0946bc11-3345-4324-aae9-826311e73130", "address": "fa:16:3e:a6:c5:0f", "network": {"id": "7c13b465-8acf-4816-874e-478853506361", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "5833896e0452492db476be34cc38d300", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0946bc11-33", "ovs_interfaceid": "0946bc11-3345-4324-aae9-826311e73130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.481872] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1363.482161] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1363.482294] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1363.482515] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1363.482656] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1363.482800] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1363.483015] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1363.483187] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1363.483353] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1363.483625] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1363.483812] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1363.484705] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd2e541-d8a8-460f-9209-9c00d55dfc48 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.491416] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Releasing lock "refresh_cache-1e005b4d-7f94-4263-ba5d-303af209c408" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.491797] env[62476]: DEBUG nova.compute.manager [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1363.491983] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1363.492482] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58fc02ff-079f-4714-9af7-851c71d9d278 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.498371] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c22561f-9e7e-495e-ba44-67cf8827444c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.510089] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe78b7d4-2239-4347-af49-06112e1071d8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.549215] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1e005b4d-7f94-4263-ba5d-303af209c408 could not be found. 
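The nova.virt.hardware DEBUG lines above show the topology search for the 1-vCPU m1.nano flavor: with flavor and image limits unset (0:0:0), the default maxima of 65536 sockets/cores/threads apply, and the only factorization of one vCPU is 1:1:1 — hence "Got 1 possible topologies". An illustrative, self-contained sketch of that enumeration (hypothetical helper, not the nova.virt.hardware source):

    from itertools import product

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) triple whose product is
        # exactly the vCPU count, within the given maxima; each range is
        # capped at vcpus, matching the "1 vcpu(s) 1:1:1" line above.
        found = []
        for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                               range(1, min(vcpus, max_cores) + 1),
                                               range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- the single topology the log reports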
[ 1363.549440] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1363.549620] env[62476]: INFO nova.compute.manager [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1363.549938] env[62476]: DEBUG oslo.service.loopingcall [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1363.552722] env[62476]: DEBUG nova.compute.manager [-] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1363.552828] env[62476]: DEBUG nova.network.neutron [-] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1363.678169] env[62476]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62476) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1363.678431] env[62476]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1363.679018] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-802f9f41-9d25-4701-b87f-c97db021c4be'] [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1363.679018] env[62476]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1363.679364] env[62476]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1363.679364] env[62476]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1363.679760] env[62476]: ERROR oslo.service.loopingcall [ 1363.680136] env[62476]: ERROR nova.compute.manager [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
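The looping-call failure above is the second half of the same credential problem: this time the 401 comes back on nova's admin Neutron client while deallocating ports, and the wrapper at nova/network/neutron.py:196-212 in the traceback escalates it to NeutronAdminCredentialConfigurationInvalid — "check the [neutron] credentials in nova.conf" rather than "this user is not allowed". A hedged sketch of that decorator pattern, with simplified names and stand-in exceptions rather than the verbatim Nova source:

    import functools

    class NeutronUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class Unauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized (user token rejected, neutron.py:204 above)."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova exception raised at neutron.py:212 above."""

    def translate_neutron_401(admin_client):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)   # "ret = obj(*args, **kwargs)" in the tracebacks
                except NeutronUnauthorized:
                    if admin_client:
                        # Even the service's own token was rejected: a
                        # deployment/configuration problem, not a user error.
                        raise NeutronAdminCredentialConfigurationInvalid()
                    raise Unauthorized()
            return wrapper
        return decorator

    @translate_neutron_401(admin_client=True)
    def list_ports(**search_opts):
        # The deallocate path above failed exactly here, on a ports listing.
        raise NeutronUnauthorized("401: The request you have made requires authentication.")

Note how the same underlying 401 produced nova.exception.Unauthorized earlier (user-context cleanup during reschedule) but NeutronAdminCredentialConfigurationInvalid here (admin-context deallocation), which is what leaves the instance in vm_state ERROR below.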
[ 1363.700542] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642abd7c-4125-4a17-8d16-81b4fbc3645c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.709120] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4447ef34-775d-4141-b642-fb2852b09f9c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.740322] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa0302c-38ca-4d63-aeca-cdd3f7bf56b7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.744372] env[62476]: ERROR nova.compute.manager [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last): [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] exception_handler_v20(status_code, error_body) [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise client_exc(message=error_message, [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Neutron server returns request_ids: ['req-802f9f41-9d25-4701-b87f-c97db021c4be'] [ 1363.744372] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During handling of the above exception, another exception occurred: [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Traceback (most recent call last): [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in 
do_terminate_instance [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._delete_instance(context, instance, bdms) [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._shutdown_instance(context, instance, bdms) [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._try_deallocate_network(context, instance, requested_networks) [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] with excutils.save_and_reraise_exception(): [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1363.744683] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self.force_reraise() [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise self.value [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] _deallocate_network_with_retries() [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return evt.wait() [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = hub.switch() [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.greenlet.switch() [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1363.744952] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = func(*self.args, **self.kw) [ 1363.745225] env[62476]: ERROR 
nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] result = f(*args, **kwargs) [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._deallocate_network( [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self.network_api.deallocate_for_instance( [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] data = neutron.list_ports(**search_opts) [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.list('ports', self.ports_path, retrieve_all, [ 1363.745225] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] for r in self._pagination(collection, path, **params): [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] res = self.get(path, params=params) [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1363.745529] env[62476]: ERROR 
nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.retry_request("GET", action, body=body, [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1363.745529] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] return self.do_request(method, action, body=body, [ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] ret = obj(*args, **kwargs) [ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] self._handle_fault_response(status_code, replybody, resp) [ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1363.745860] env[62476]: ERROR nova.compute.manager [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] [ 1363.753331] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fc9c9f-5ca4-4293-ac57-862890ff720b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.770065] env[62476]: DEBUG nova.compute.provider_tree [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.773156] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Lock "1e005b4d-7f94-4263-ba5d-303af209c408" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.701s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.774290] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "1e005b4d-7f94-4263-ba5d-303af209c408" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 157.339s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.774484] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] During sync_power_state the instance has a pending task (deleting). Skip. 
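The "pending task (deleting). Skip." INFO line above is _sync_power_states declining to touch an instance that still has a task in flight: after waiting 157s for the per-instance lock, the periodic task finds task_state set and bails out instead of racing the terminate path. A simplified illustration of that guard (not the verbatim ComputeManager code):

    from dataclasses import dataclass

    @dataclass
    class Instance:                 # minimal stand-in for the Nova instance object
        uuid: str
        task_state: str | None

    def query_driver_power_state_and_sync(instance):
        # Anything with an in-flight task (here task_state == 'deleting') is
        # skipped, so the periodic sync cannot fight the operation that owns
        # the instance state.
        if instance.task_state is not None:
            return "skipped"
        # ...otherwise the driver's power state would be compared with the
        # DB record and corrected here.
        return "synced"

    print(query_driver_power_state_and_sync(Instance("1e005b4d", "deleting")))  # skipped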
[ 1363.774658] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "1e005b4d-7f94-4263-ba5d-303af209c408" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.780139] env[62476]: DEBUG nova.scheduler.client.report [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1363.804250] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.506s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.804250] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1363.833176] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Successfully created port: b7f61eba-f6d6-4628-9c40-fe5be1105532 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.842612] env[62476]: INFO nova.compute.manager [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] [instance: 1e005b4d-7f94-4263-ba5d-303af209c408] Successfully reverted task state from None on failure for instance. [ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server [None req-a74ff5a6-7e88-48cc-a6e3-f722c8437520 tempest-DeleteServersAdminTestJSON-324697181 tempest-DeleteServersAdminTestJSON-324697181-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body)
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message,
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-802f9f41-9d25-4701-b87f-c97db021c4be']
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1363.846642] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server raise self.value
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1363.847138] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server raise self.value
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server raise self.value
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms)
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs)
[ 1363.847636] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server raise self.value
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms)
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms)
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks)
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server raise self.value
[ 1363.848126] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries()
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server return evt.wait()
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server result = hub.switch()
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server return self.greenlet.switch()
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw)
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs)
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server self._deallocate_network(
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance(
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts)
[ 1363.848591] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all,
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params):
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params)
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body,
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body,
[ 1363.849056] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1363.849537] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1363.849537] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1363.849537] env[62476]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp)
[ 1363.849537] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1363.849537] env[62476]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1363.849537] env[62476]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1363.849537] env[62476]: ERROR oslo_messaging.rpc.server
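Editor's note: the traceback above is the key failure in this run. A plain 401 from Neutron (`neutronclient.common.exceptions.Unauthorized`) is translated by the wrapper in nova/network/neutron.py into `NeutronAdminCredentialConfigurationInvalid`, because a 401 on one of Nova's own service calls means Nova's configured Neutron credentials were rejected, not the end user's token. A hedged sketch of that translation pattern (an illustrative reimplementation, not the actual wrapper):

```python
# Illustrative reimplementation of the exception translation the
# traceback shows; the real wrapper lives in nova/network/neutron.py.
import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Networking client is experiencing an unauthorized exception."""

def translate_neutron_errors(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            # The 401 was raised against Nova's own service credentials,
            # so reporting it as a configuration problem is more
            # actionable than bubbling up the raw HTTP error.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper

@translate_neutron_errors
def list_ports(**search_opts):
    # Simulates the failing neutron.list_ports() call from the traceback.
    raise Unauthorized("The request you have made requires authentication.")
```

In a real deployment this error typically points at the service-credential settings Nova uses to reach Neutron (the `[neutron]` section of nova.conf: auth_url, username, password, project and domain options) or at an expired/rotated service account.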
[ 1363.851978] env[62476]: DEBUG nova.compute.utils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1363.853317] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1363.853495] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1363.869025] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1363.926885] env[62476]: DEBUG nova.policy [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3679a49f26014c3a87cbd26667ebb15a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f11b74a785074d08965c52999ceda5c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1363.957953] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Start spawning the instance on the hypervisor.
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1363.987850] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1363.988125] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1363.988286] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1363.988471] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1363.988639] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1363.988797] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1363.989015] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1363.989186] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1363.989356] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1363.989516] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1363.989690] env[62476]: DEBUG nova.virt.hardware [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1363.990607] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faea4a7-368f-4b28-8d94-a22d6145fb5b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.999464] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c8664c-5061-4fcc-bab5-4c3e3ed3efb8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.359571] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Successfully created port: 1570aad8-95a3-40e8-b3c1-717c1887399e {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1364.597483] env[62476]: DEBUG nova.compute.manager [req-49c621a7-ad29-41e4-8e28-36d49424a158 req-e12a5a7b-f53a-4b3c-bfad-6e42440ad70d service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Received event network-vif-plugged-b7f61eba-f6d6-4628-9c40-fe5be1105532 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1364.597791] env[62476]: DEBUG oslo_concurrency.lockutils [req-49c621a7-ad29-41e4-8e28-36d49424a158 req-e12a5a7b-f53a-4b3c-bfad-6e42440ad70d service nova] Acquiring lock "6f133a49-bb62-45c6-a014-a2f99766d092-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.598016] env[62476]: DEBUG oslo_concurrency.lockutils [req-49c621a7-ad29-41e4-8e28-36d49424a158 req-e12a5a7b-f53a-4b3c-bfad-6e42440ad70d service nova] Lock "6f133a49-bb62-45c6-a014-a2f99766d092-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.598510] env[62476]: DEBUG oslo_concurrency.lockutils [req-49c621a7-ad29-41e4-8e28-36d49424a158 req-e12a5a7b-f53a-4b3c-bfad-6e42440ad70d service nova] Lock "6f133a49-bb62-45c6-a014-a2f99766d092-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.598730] env[62476]: DEBUG nova.compute.manager [req-49c621a7-ad29-41e4-8e28-36d49424a158 req-e12a5a7b-f53a-4b3c-bfad-6e42440ad70d service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] No waiting events found dispatching network-vif-plugged-b7f61eba-f6d6-4628-9c40-fe5be1105532 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1364.598910] env[62476]: WARNING nova.compute.manager [req-49c621a7-ad29-41e4-8e28-36d49424a158 req-e12a5a7b-f53a-4b3c-bfad-6e42440ad70d service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Received unexpected event network-vif-plugged-b7f61eba-f6d6-4628-9c40-fe5be1105532 for instance with vm_state building and task_state spawning. [ 1364.679226] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Successfully updated port: b7f61eba-f6d6-4628-9c40-fe5be1105532 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1364.691746] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "refresh_cache-6f133a49-bb62-45c6-a014-a2f99766d092" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.691906] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired lock "refresh_cache-6f133a49-bb62-45c6-a014-a2f99766d092" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.692068] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1364.768633] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1364.980980] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Updating instance_info_cache with network_info: [{"id": "b7f61eba-f6d6-4628-9c40-fe5be1105532", "address": "fa:16:3e:a3:50:a6", "network": {"id": "679529b5-027b-4061-a8a3-a863a3d0835d", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1709465969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f11b74a785074d08965c52999ceda5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7f61eba-f6", "ovs_interfaceid": "b7f61eba-f6d6-4628-9c40-fe5be1105532", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.991654] env[62476]: DEBUG nova.compute.manager [req-dd33529f-124f-48d8-8f37-09dd52c665be req-63f14952-6a94-4448-95fc-cd7da9e4b773 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] Received event network-vif-plugged-1570aad8-95a3-40e8-b3c1-717c1887399e {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1364.991885] env[62476]: DEBUG oslo_concurrency.lockutils [req-dd33529f-124f-48d8-8f37-09dd52c665be req-63f14952-6a94-4448-95fc-cd7da9e4b773 service nova] Acquiring lock "a0490305-7494-4612-843f-bac04dd0f328-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.992115] env[62476]: DEBUG oslo_concurrency.lockutils [req-dd33529f-124f-48d8-8f37-09dd52c665be req-63f14952-6a94-4448-95fc-cd7da9e4b773 service nova] Lock "a0490305-7494-4612-843f-bac04dd0f328-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.992306] env[62476]: DEBUG oslo_concurrency.lockutils [req-dd33529f-124f-48d8-8f37-09dd52c665be req-63f14952-6a94-4448-95fc-cd7da9e4b773 service nova] Lock "a0490305-7494-4612-843f-bac04dd0f328-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.992474] env[62476]: DEBUG nova.compute.manager [req-dd33529f-124f-48d8-8f37-09dd52c665be req-63f14952-6a94-4448-95fc-cd7da9e4b773 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] No waiting events found dispatching network-vif-plugged-1570aad8-95a3-40e8-b3c1-717c1887399e {{(pid=62476) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1364.992640] env[62476]: WARNING nova.compute.manager [req-dd33529f-124f-48d8-8f37-09dd52c665be req-63f14952-6a94-4448-95fc-cd7da9e4b773 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] Received unexpected event network-vif-plugged-1570aad8-95a3-40e8-b3c1-717c1887399e for instance with vm_state building and task_state spawning. [ 1365.002607] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Releasing lock "refresh_cache-6f133a49-bb62-45c6-a014-a2f99766d092" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.002899] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Instance network_info: |[{"id": "b7f61eba-f6d6-4628-9c40-fe5be1105532", "address": "fa:16:3e:a3:50:a6", "network": {"id": "679529b5-027b-4061-a8a3-a863a3d0835d", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1709465969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f11b74a785074d08965c52999ceda5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7f61eba-f6", "ovs_interfaceid": "b7f61eba-f6d6-4628-9c40-fe5be1105532", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1365.003311] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:50:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73eeba7c-29e1-4fdf-82b3-d62e63e86051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7f61eba-f6d6-4628-9c40-fe5be1105532', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1365.011232] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating folder: Project (f11b74a785074d08965c52999ceda5c3). Parent ref: group-v849485. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1365.012273] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cab6ffda-cdb1-4125-a12c-5c6c78b68da1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.024231] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Created folder: Project (f11b74a785074d08965c52999ceda5c3) in parent group-v849485. [ 1365.024948] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating folder: Instances. Parent ref: group-v849552. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1365.025289] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-989a8b7d-7da7-42ca-a5f6-be75892c7c2e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.037470] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Created folder: Instances in parent group-v849552. [ 1365.037735] env[62476]: DEBUG oslo.service.loopingcall [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1365.037940] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1365.038166] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d20cd86-2e09-4d0f-ade3-b361ec6eaef8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.058721] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1365.058721] env[62476]: value = "task-4319120" [ 1365.058721] env[62476]: _type = "Task" [ 1365.058721] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.068375] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319120, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.074613] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Successfully updated port: 1570aad8-95a3-40e8-b3c1-717c1887399e {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1365.085835] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "refresh_cache-a0490305-7494-4612-843f-bac04dd0f328" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.085968] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired lock "refresh_cache-a0490305-7494-4612-843f-bac04dd0f328" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.086159] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1365.163501] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1365.438252] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Updating instance_info_cache with network_info: [{"id": "1570aad8-95a3-40e8-b3c1-717c1887399e", "address": "fa:16:3e:fa:a4:2b", "network": {"id": "679529b5-027b-4061-a8a3-a863a3d0835d", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1709465969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f11b74a785074d08965c52999ceda5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1570aad8-95", "ovs_interfaceid": "1570aad8-95a3-40e8-b3c1-717c1887399e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.456048] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Releasing lock "refresh_cache-a0490305-7494-4612-843f-bac04dd0f328" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.456388] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Instance network_info: |[{"id": "1570aad8-95a3-40e8-b3c1-717c1887399e", "address": "fa:16:3e:fa:a4:2b", "network": {"id": "679529b5-027b-4061-a8a3-a863a3d0835d", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1709465969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f11b74a785074d08965c52999ceda5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1570aad8-95", "ovs_interfaceid": "1570aad8-95a3-40e8-b3c1-717c1887399e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1365.457086] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:a4:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73eeba7c-29e1-4fdf-82b3-d62e63e86051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1570aad8-95a3-40e8-b3c1-717c1887399e', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1365.464848] env[62476]: DEBUG oslo.service.loopingcall [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1365.465387] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0490305-7494-4612-843f-bac04dd0f328] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1365.465641] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bde3231-e105-4cdd-9ea8-d2d18673950b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.486852] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1365.486852] env[62476]: value = "task-4319121" [ 1365.486852] env[62476]: _type = "Task" [ 1365.486852] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.495838] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319121, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.569596] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319120, 'name': CreateVM_Task, 'duration_secs': 0.378634} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.569777] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1365.570454] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.570623] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.570953] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1365.571223] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc0c6708-9eb7-4a03-ab8e-94cd40406530 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.577097] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1365.577097] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5208eea5-1810-b5f6-945f-eb304c3bf396" [ 1365.577097] env[62476]: _type = "Task" [ 1365.577097] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.585539] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5208eea5-1810-b5f6-945f-eb304c3bf396, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.997962] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319121, 'name': CreateVM_Task, 'duration_secs': 0.362971} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.998279] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0490305-7494-4612-843f-bac04dd0f328] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1365.998798] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.092553] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.092959] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1366.093304] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.093625] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.094056] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1366.094403] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2608f75b-8b6c-4fa2-ad7e-e27ddfebc8db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.100295] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1366.100295] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]528100ce-bc78-55a6-7f4b-82cd0d6ac28b" [ 1366.100295] env[62476]: _type = "Task" [ 1366.100295] 
env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.108635] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]528100ce-bc78-55a6-7f4b-82cd0d6ac28b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.611559] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.611745] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1366.611981] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.681570] env[62476]: DEBUG nova.compute.manager [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Received event network-changed-b7f61eba-f6d6-4628-9c40-fe5be1105532 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1366.681773] env[62476]: DEBUG nova.compute.manager [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Refreshing instance network info cache due to event network-changed-b7f61eba-f6d6-4628-9c40-fe5be1105532. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1366.681987] env[62476]: DEBUG oslo_concurrency.lockutils [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] Acquiring lock "refresh_cache-6f133a49-bb62-45c6-a014-a2f99766d092" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.682145] env[62476]: DEBUG oslo_concurrency.lockutils [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] Acquired lock "refresh_cache-6f133a49-bb62-45c6-a014-a2f99766d092" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.682306] env[62476]: DEBUG nova.network.neutron [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Refreshing network info cache for port b7f61eba-f6d6-4628-9c40-fe5be1105532 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1366.970184] env[62476]: DEBUG nova.network.neutron [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Updated VIF entry in instance network info cache for port b7f61eba-f6d6-4628-9c40-fe5be1105532. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1366.970559] env[62476]: DEBUG nova.network.neutron [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Updating instance_info_cache with network_info: [{"id": "b7f61eba-f6d6-4628-9c40-fe5be1105532", "address": "fa:16:3e:a3:50:a6", "network": {"id": "679529b5-027b-4061-a8a3-a863a3d0835d", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1709465969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f11b74a785074d08965c52999ceda5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7f61eba-f6", "ovs_interfaceid": "b7f61eba-f6d6-4628-9c40-fe5be1105532", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.980467] env[62476]: DEBUG oslo_concurrency.lockutils [req-0f2078bb-d61a-4396-ad85-6ef90bc8f241 req-447b93ad-f9ac-425d-8048-22446e6597b9 service nova] Releasing lock "refresh_cache-6f133a49-bb62-45c6-a014-a2f99766d092" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.021031] env[62476]: DEBUG nova.compute.manager [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] Received event 
network-changed-1570aad8-95a3-40e8-b3c1-717c1887399e {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1367.021365] env[62476]: DEBUG nova.compute.manager [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] Refreshing instance network info cache due to event network-changed-1570aad8-95a3-40e8-b3c1-717c1887399e. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1367.021589] env[62476]: DEBUG oslo_concurrency.lockutils [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] Acquiring lock "refresh_cache-a0490305-7494-4612-843f-bac04dd0f328" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.021844] env[62476]: DEBUG oslo_concurrency.lockutils [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] Acquired lock "refresh_cache-a0490305-7494-4612-843f-bac04dd0f328" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.022133] env[62476]: DEBUG nova.network.neutron [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] Refreshing network info cache for port 1570aad8-95a3-40e8-b3c1-717c1887399e {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1367.325644] env[62476]: DEBUG nova.network.neutron [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] Updated VIF entry in instance network info cache for port 1570aad8-95a3-40e8-b3c1-717c1887399e. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1367.326035] env[62476]: DEBUG nova.network.neutron [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] [instance: a0490305-7494-4612-843f-bac04dd0f328] Updating instance_info_cache with network_info: [{"id": "1570aad8-95a3-40e8-b3c1-717c1887399e", "address": "fa:16:3e:fa:a4:2b", "network": {"id": "679529b5-027b-4061-a8a3-a863a3d0835d", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1709465969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f11b74a785074d08965c52999ceda5c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1570aad8-95", "ovs_interfaceid": "1570aad8-95a3-40e8-b3c1-717c1887399e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.336055] env[62476]: DEBUG oslo_concurrency.lockutils [req-4c747b8c-0a7f-437d-b221-5de9eb107dfd req-51c9e323-ce41-4492-89bb-feeb65a810c6 service nova] Releasing lock "refresh_cache-a0490305-7494-4612-843f-bac04dd0f328" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.653359] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "a0490305-7494-4612-843f-bac04dd0f328" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.731238] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "6f133a49-bb62-45c6-a014-a2f99766d092" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.921785] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "fe895d70-4c56-4854-83bf-a66cc1623d59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.922074] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 
tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.264734] env[62476]: WARNING oslo_vmware.rw_handles [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1410.264734] env[62476]: ERROR oslo_vmware.rw_handles [ 1410.265328] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1410.267706] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1410.268013] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Copying Virtual Disk [datastore1] vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/c2fcb1e4-b632-461f-9035-01bcfb9f0740/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1410.268357] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-360483ff-5c7f-4885-8943-72b548084634 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.277097] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Waiting for the task: (returnval){ [ 1410.277097] env[62476]: value = "task-4319122" [ 1410.277097] env[62476]: _type = "Task" [ 1410.277097] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.286013] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Task: {'id': task-4319122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.788111] env[62476]: DEBUG oslo_vmware.exceptions [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1410.788459] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.789101] env[62476]: ERROR nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1410.789101] env[62476]: Faults: ['InvalidArgument'] [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] yield resources [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.driver.spawn(context, instance, image_meta, [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: 
f4e97733-101b-46dd-aec4-a3287b120eb0] self._fetch_image_if_missing(context, vi) [ 1410.789101] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] image_cache(vi, tmp_image_ds_loc) [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] vm_util.copy_virtual_disk( [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] session._wait_for_task(vmdk_copy_task) [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.wait_for_task(task_ref) [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return evt.wait() [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] result = hub.switch() [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1410.789452] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.greenlet.switch() [ 1410.789761] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1410.789761] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.f(*self.args, **self.kw) [ 1410.789761] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1410.789761] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise exceptions.translate_fault(task_info.error) [ 1410.789761] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1410.789761] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Faults: ['InvalidArgument'] [ 1410.789761] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1410.789761] env[62476]: INFO nova.compute.manager 
[None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Terminating instance [ 1410.791202] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.791586] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1410.791911] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09658ab7-cb45-45c4-a8f3-d4c87ac92421 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.795477] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1410.795714] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1410.796472] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14afeaf6-83a3-41eb-9c46-e23fe1a42992 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.803379] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1410.803661] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47629b02-bb86-423c-a23c-f5903c08b33d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.805991] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1410.806224] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1410.807249] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa5956b5-9f23-40f3-a75d-052639ab496a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.812328] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1410.812328] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5275788f-cb2d-125f-1b70-def183d6e257" [ 1410.812328] env[62476]: _type = "Task" [ 1410.812328] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.819849] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5275788f-cb2d-125f-1b70-def183d6e257, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.874098] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1410.874412] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1410.874642] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Deleting the datastore file [datastore1] f4e97733-101b-46dd-aec4-a3287b120eb0 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1410.874965] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce2b5ae0-b54f-4f6d-ac29-0a01bada9567 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.882305] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Waiting for the task: (returnval){ [ 1410.882305] env[62476]: value = "task-4319124" [ 1410.882305] env[62476]: _type = "Task" [ 1410.882305] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.891086] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Task: {'id': task-4319124, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.324572] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1411.326019] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating directory with path [datastore1] vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1411.326019] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc475084-6fc4-4d95-9c14-70c0fd80ea7b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.340588] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created directory with path [datastore1] vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1411.340817] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Fetch image to [datastore1] vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1411.340994] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1411.341858] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85cfd5a-02a1-4041-9c83-0fbde340cf36 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.350254] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8761c6-1e8d-45d9-8194-2437dcff52da {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.360178] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290394a6-082a-4fda-ad5a-dc681b979f26 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.395469] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-61c2364a-e9fd-49f5-886c-9281b8fd4479 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.404377] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-504de29e-0afe-46a6-9da9-967bf5646027 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.406085] env[62476]: DEBUG oslo_vmware.api [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Task: {'id': task-4319124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085029} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.406333] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1411.406518] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1411.406693] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1411.406866] env[62476]: INFO nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Took 0.61 seconds to destroy the instance on the hypervisor. 
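The entries above show the oslo.vmware task lifecycle: the caller logs "Waiting for the task: (returnval){ value = "task-4319124" ... }", _poll_task reports "progress is 0%", and the task later "completed successfully". Below is a minimal, self-contained sketch of that polling pattern; the class and function names are illustrative stand-ins, not the real oslo.vmware API.

    # Sketch of the poll-until-terminal-state loop that wait_for_task /
    # _poll_task perform in the log above. All names here are hypothetical.
    import time

    class FakeTaskInfo:
        def __init__(self, state, progress=0, error=None):
            self.state = state          # 'running', 'success', or 'error'
            self.progress = progress
            self.error = error

    class FakeSession:
        """Stands in for a vSphere session; returns canned task states."""
        def __init__(self, states):
            self._states = iter(states)

        def get_task_info(self, task_ref):
            return next(self._states)

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll task_ref until it succeeds, raising if it ends in error."""
        while True:
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                # In the real library the fault is translated here, e.g. the
                # InvalidArgument ('fileType') fault seen earlier in this log.
                raise RuntimeError(info.error)
            time.sleep(interval)

    session = FakeSession([
        FakeTaskInfo('running', progress=0),
        FakeTaskInfo('running', progress=50),
        FakeTaskInfo('success', progress=100),
    ])
    print(wait_for_task(session, 'task-4319122').progress)  # 100

A task that ends in error takes the raise path instead, which is how the CopyVirtualDisk_Task failure above surfaces as a VimFaultException in the compute manager's traceback.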
[ 1411.408986] env[62476]: DEBUG nova.compute.claims [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1411.409174] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.409419] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.428607] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1411.485479] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1411.547713] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1411.547914] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1411.766214] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d625e1-554d-4042-b891-951637db1460 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.774431] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07818014-a23a-4955-aa4e-22159afc6558 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.804121] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a13f5c9-721d-473f-bd94-cec76f782771 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.812488] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b448c8f1-fada-4c53-a0f8-bc73ee4a1d6e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.827030] env[62476]: DEBUG nova.compute.provider_tree [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.837019] env[62476]: DEBUG nova.scheduler.client.report [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1411.854432] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.445s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.854984] env[62476]: ERROR nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1411.854984] env[62476]: Faults: ['InvalidArgument'] [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1411.854984] 
env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.driver.spawn(context, instance, image_meta, [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._fetch_image_if_missing(context, vi) [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] image_cache(vi, tmp_image_ds_loc) [ 1411.854984] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] vm_util.copy_virtual_disk( [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] session._wait_for_task(vmdk_copy_task) [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.wait_for_task(task_ref) [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return evt.wait() [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] result = hub.switch() [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.greenlet.switch() [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1411.855414] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.f(*self.args, **self.kw) [ 1411.855878] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1411.855878] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise exceptions.translate_fault(task_info.error) [ 1411.855878] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1411.855878] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Faults: ['InvalidArgument'] [ 1411.855878] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1411.855878] env[62476]: DEBUG nova.compute.utils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1411.857666] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Build of instance f4e97733-101b-46dd-aec4-a3287b120eb0 was re-scheduled: A specified parameter was not correct: fileType [ 1411.857666] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1411.858052] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1411.858255] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1411.858419] env[62476]: DEBUG nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1411.858623] env[62476]: DEBUG nova.network.neutron [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1411.967224] env[62476]: DEBUG neutronclient.v2_0.client [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62476) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1411.968411] env[62476]: ERROR nova.compute.manager [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.driver.spawn(context, instance, image_meta, [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._fetch_image_if_missing(context, vi) [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] image_cache(vi, tmp_image_ds_loc) [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1411.968411] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] vm_util.copy_virtual_disk( [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] session._wait_for_task(vmdk_copy_task) [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.wait_for_task(task_ref) [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return evt.wait() [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] result = hub.switch() [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.greenlet.switch() [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.f(*self.args, **self.kw) [ 1411.968745] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise exceptions.translate_fault(task_info.error) [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Faults: ['InvalidArgument'] [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] During handling of the above exception, another exception occurred: [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._build_and_run_instance(context, instance, image, [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File 
"/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise exception.RescheduledException( [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] nova.exception.RescheduledException: Build of instance f4e97733-101b-46dd-aec4-a3287b120eb0 was re-scheduled: A specified parameter was not correct: fileType [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Faults: ['InvalidArgument'] [ 1411.969183] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] During handling of the above exception, another exception occurred: [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] exception_handler_v20(status_code, error_body) [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise client_exc(message=error_message, [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Neutron server returns request_ids: ['req-d4ce6c1e-36fd-47cd-b286-8e2eab4a46c8'] [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1411.969567] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] During handling of the above exception, another exception occurred: [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._deallocate_network(context, instance, requested_networks) [ 1411.969930] env[62476]: ERROR nova.compute.manager 
[instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.network_api.deallocate_for_instance( [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] data = neutron.list_ports(**search_opts) [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.list('ports', self.ports_path, retrieve_all, [ 1411.969930] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] for r in self._pagination(collection, path, **params): [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] res = self.get(path, params=params) [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.retry_request("GET", action, body=body, [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1411.970299] env[62476]: ERROR nova.compute.manager [instance: 
f4e97733-101b-46dd-aec4-a3287b120eb0] return self.do_request(method, action, body=body, [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._handle_fault_response(status_code, replybody, resp) [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise exception.Unauthorized() [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] nova.exception.Unauthorized: Not authorized. [ 1411.970659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1412.023960] env[62476]: INFO nova.scheduler.client.report [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Deleted allocations for instance f4e97733-101b-46dd-aec4-a3287b120eb0 [ 1412.046765] env[62476]: DEBUG oslo_concurrency.lockutils [None req-016b4fad-0354-4bf9-8235-885fdf447762 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.011s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.048018] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.456s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.048336] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Acquiring lock "f4e97733-101b-46dd-aec4-a3287b120eb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.048593] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.048801] env[62476]: DEBUG 
oslo_concurrency.lockutils [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.050813] env[62476]: INFO nova.compute.manager [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Terminating instance [ 1412.052554] env[62476]: DEBUG nova.compute.manager [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1412.052758] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1412.053256] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5afecc8c-d991-4762-8d0e-12c3da8bcd1c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.059450] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a9a06075-ff8e-401e-9b3a-055fb50c2e2a] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1412.065737] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a585720-cd3b-486b-af10-83ab2ec88685 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.084460] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a9a06075-ff8e-401e-9b3a-055fb50c2e2a] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1412.096896] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f4e97733-101b-46dd-aec4-a3287b120eb0 could not be found. 
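The per-instance lock lifecycle recorded above (acquired by _locked_do_build_and_run_instance and held 631.011s, then acquired by do_terminate_instance after waiting 433.456s) is standard oslo.concurrency behaviour: the "waited"/"held" DEBUG messages come from lockutils itself. A minimal sketch of the pattern, assuming a hypothetical function and reusing the instance UUID as the lock name:

    # Sketch only, not Nova's code: serialize work on one instance with
    # oslo.concurrency. lockutils logs the acquire (with wait time) and the
    # release (with hold time) at DEBUG, exactly as in the lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('f4e97733-101b-46dd-aec4-a3287b120eb0')
    def do_terminate_instance():
        pass  # runs with the per-instance lock held

    do_terminate_instance()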
[ 1412.097124] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1412.097308] env[62476]: INFO nova.compute.manager [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1412.097557] env[62476]: DEBUG oslo.service.loopingcall [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.098341] env[62476]: DEBUG nova.compute.manager [-] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1412.098446] env[62476]: DEBUG nova.network.neutron [-] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1412.110264] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a9a06075-ff8e-401e-9b3a-055fb50c2e2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.310s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.120814] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1412.170919] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.171185] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.172707] env[62476]: INFO nova.compute.claims [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1412.192523] env[62476]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62476) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1412.192853] env[62476]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
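The 401 above is Keystone rejecting the credentials Nova uses to call Neutron, hence the logged hint to verify the admin credential in nova.conf. A quick, hedged way to test those credentials outside Nova is to request a token directly with keystoneauth1; every endpoint and credential value below is a placeholder, not taken from this log:

    # Sanity-check sketch: can the configured service credentials still get a
    # Keystone token? A 401 here reproduces the failure seen above.
    from keystoneauth1 import session
    from keystoneauth1.identity import v3

    auth = v3.Password(
        auth_url='https://keystone.example.test/v3',  # placeholder endpoint
        username='nova',                              # placeholder credentials
        password='REDACTED',
        project_name='service',
        user_domain_name='Default',
        project_domain_name='Default',
    )
    sess = session.Session(auth=auth)
    print(sess.get_token())  # raises keystoneauth1.exceptions.Unauthorized on 401

The traceback that follows shows the deallocation retry wrapper giving up once the failure is classified as a credential problem rather than a transient error.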
[ 1412.193387] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-5e1f1c7a-85ff-402a-9a52-df965ba8ad1c'] [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1412.193387] env[62476]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1412.193801] env[62476]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1412.193801] env[62476]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1412.194421] env[62476]: ERROR oslo.service.loopingcall [ 1412.195051] env[62476]: ERROR nova.compute.manager [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1412.230338] env[62476]: ERROR nova.compute.manager [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] exception_handler_v20(status_code, error_body) [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise client_exc(message=error_message, [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Neutron server returns request_ids: ['req-5e1f1c7a-85ff-402a-9a52-df965ba8ad1c'] [ 1412.230338] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] During handling of the above exception, another exception occurred: [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Traceback (most recent call last): [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._delete_instance(context, instance, bdms) [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._shutdown_instance(context, instance, bdms) [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._try_deallocate_network(context, instance, requested_networks) [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] with excutils.save_and_reraise_exception(): [ 1412.232304] env[62476]: ERROR 
nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1412.232304] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.force_reraise() [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise self.value [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] _deallocate_network_with_retries() [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return evt.wait() [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] result = hub.switch() [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.greenlet.switch() [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1412.232659] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] result = func(*self.args, **self.kw) [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] result = f(*args, **kwargs) [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._deallocate_network( [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self.network_api.deallocate_for_instance( [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: 
f4e97733-101b-46dd-aec4-a3287b120eb0] data = neutron.list_ports(**search_opts) [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.list('ports', self.ports_path, retrieve_all, [ 1412.233015] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] for r in self._pagination(collection, path, **params): [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] res = self.get(path, params=params) [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.retry_request("GET", action, body=body, [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1412.233383] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] return self.do_request(method, action, body=body, [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] ret = obj(*args, **kwargs) [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] self._handle_fault_response(status_code, replybody, resp) [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1412.233729] env[62476]: ERROR nova.compute.manager [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] [ 1412.259877] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.211s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.260563] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 205.825s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.260753] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1412.260928] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "f4e97733-101b-46dd-aec4-a3287b120eb0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.308937] env[62476]: INFO nova.compute.manager [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] [instance: f4e97733-101b-46dd-aec4-a3287b120eb0] Successfully reverted task state from None on failure for instance. [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server [None req-a504383b-516c-4c05-bedc-05e1748900e2 tempest-ListImageFiltersTestJSON-1669522138 tempest-ListImageFiltersTestJSON-1669522138-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-5e1f1c7a-85ff-402a-9a52-df965ba8ad1c'] [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1412.315295] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1412.315763] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1412.316608] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1412.317148] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1412.317631] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.318135] env[62476]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1412.318135] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1412.318678] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1412.318678] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1412.318678] env[62476]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1412.318678] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1412.318678] env[62476]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1412.318678] env[62476]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
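_deallocate_network_with_retries, visible in all three tracebacks, is driven by oslo.service's RetryDecorator, which re-runs the wrapped function via a dynamic-interval looping call whenever a listed exception is raised; anything not in the list, such as the credential error here, escapes on the first attempt, which is why the looping call is reported as failed rather than retried. A hedged sketch of that wrapper (the retry counts, sleep times, and exception class are illustrative, not Nova's values):

    from oslo_service import loopingcall

    class TransientNetworkError(Exception):
        pass

    # Retries deallocate() with growing sleeps on TransientNetworkError only;
    # any other exception propagates immediately, as in the log above.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=5,
                                exceptions=(TransientNetworkError,))
    def deallocate():
        pass

    deallocate()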
[ 1412.318678] env[62476]: ERROR oslo_messaging.rpc.server [ 1412.488417] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbccd163-f043-4e29-b805-90864bf55cf5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.497063] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4285eb02-716d-472d-b3b7-3f1eaf59f66b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.528668] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c480e3-05ea-4ead-b54d-7bc82c5abf55 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.538026] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5e1115-2a2a-458d-8ef6-253200f79ba8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.551833] env[62476]: DEBUG nova.compute.provider_tree [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.561134] env[62476]: DEBUG nova.scheduler.client.report [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1412.577337] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.406s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.577894] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1412.614234] env[62476]: DEBUG nova.compute.utils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1412.615545] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1412.615882] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1412.625051] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1412.693017] env[62476]: DEBUG nova.policy [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10dc9791cc96471c926e4eb8e1129b2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cdbe9b66c724475a673e94fdb118821', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1412.697138] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1412.725038] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.725306] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.725464] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.725645] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.725793] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.725940] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.726172] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.726334] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.726502] 
env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.726663] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.726834] env[62476]: DEBUG nova.virt.hardware [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.727719] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72a39bb-075f-4105-a01d-9d9dd8da742e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.736789] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69e81cf-2b21-40ff-a49e-cab1c5965ace {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.027071] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.040350] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.040611] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.040780] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.040939] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1413.042660] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11085c2a-dee0-4898-b4b9-11ea8a6ff9e9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.052699] env[62476]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d4cc3c-998e-4329-9d20-6b7bb1f5f576 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.068842] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390ea45c-b749-4bad-9224-53ffe9010085 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.076095] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb37a844-a804-431d-a3b3-28da85dcb323 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.107559] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180692MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1413.107746] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.107953] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.184236] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.184411] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.184528] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.184650] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.184772] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.184893] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.185017] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.185139] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.185254] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.185369] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.197198] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.208442] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5c5fe542-5362-4fe9-a359-ea3eac825ca0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.220183] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance b304faf2-127c-4185-89c4-84093c81cf6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.231103] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.242235] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e95a41ff-af11-48ac-8245-c70eb0a73c7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.255910] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6a5ba30d-8a44-49bb-b061-fadd99dc4d4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.266793] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.278934] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f082523d-622b-4d64-b15f-a8511261f4b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.290800] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.291051] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1413.291262] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1413.375089] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Successfully created port: bc5682c3-9b92-4102-b0e5-d5bfd025e2bd {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.584899] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173ee11f-c490-4573-9583-0cd6ef8651dd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.592673] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c5daa3-f830-4c85-8b1c-464db87c2e9e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.627549] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b77331-ed4f-4584-8aa6-fa0f88117bc5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.635966] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcb15ab-2c13-4ea8-9067-04cc52202c5d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.650915] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.663486] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1413.685384] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1413.685596] 
env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.578s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.148204] env[62476]: DEBUG nova.compute.manager [req-2ae7bc2d-cb8f-4032-aaff-8d32c5af7e13 req-30ae2799-3731-4361-a017-a6f400b55232 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Received event network-vif-plugged-bc5682c3-9b92-4102-b0e5-d5bfd025e2bd {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1414.148466] env[62476]: DEBUG oslo_concurrency.lockutils [req-2ae7bc2d-cb8f-4032-aaff-8d32c5af7e13 req-30ae2799-3731-4361-a017-a6f400b55232 service nova] Acquiring lock "139391d4-af04-4053-801a-792fc4fd724a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.148797] env[62476]: DEBUG oslo_concurrency.lockutils [req-2ae7bc2d-cb8f-4032-aaff-8d32c5af7e13 req-30ae2799-3731-4361-a017-a6f400b55232 service nova] Lock "139391d4-af04-4053-801a-792fc4fd724a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.149568] env[62476]: DEBUG oslo_concurrency.lockutils [req-2ae7bc2d-cb8f-4032-aaff-8d32c5af7e13 req-30ae2799-3731-4361-a017-a6f400b55232 service nova] Lock "139391d4-af04-4053-801a-792fc4fd724a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.149795] env[62476]: DEBUG nova.compute.manager [req-2ae7bc2d-cb8f-4032-aaff-8d32c5af7e13 req-30ae2799-3731-4361-a017-a6f400b55232 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] No waiting events found dispatching network-vif-plugged-bc5682c3-9b92-4102-b0e5-d5bfd025e2bd {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1414.149992] env[62476]: WARNING nova.compute.manager [req-2ae7bc2d-cb8f-4032-aaff-8d32c5af7e13 req-30ae2799-3731-4361-a017-a6f400b55232 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Received unexpected event network-vif-plugged-bc5682c3-9b92-4102-b0e5-d5bfd025e2bd for instance with vm_state building and task_state spawning. 
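The paired "Acquiring lock ... / acquired ... / released" DEBUG entries above are emitted by oslo.concurrency's lockutils wrappers (the inner/lock functions cited in each entry). A minimal illustrative sketch of the two forms visible in this log, assuming only the public lockutils API; the function body and the placeholder lock name are hypothetical, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs the "Acquiring lock", "acquired"
    # and "released" DEBUG lines (lockutils.py inner, lines 402/407/421
    # in the entries above) around the wrapped call.
    @lockutils.synchronized('compute_resources')
    def update_tracker_sketch():
        pass  # critical section guarded by the "compute_resources" lock

    # Context-manager form: the refresh_cache-<instance-uuid> acquisitions
    # above (lockutils.py lock, lines 310/313/331) use this style.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # rebuild the instance network info cache while held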
[ 1414.230017] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Successfully updated port: bc5682c3-9b92-4102-b0e5-d5bfd025e2bd {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1414.243656] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "refresh_cache-139391d4-af04-4053-801a-792fc4fd724a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.243784] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "refresh_cache-139391d4-af04-4053-801a-792fc4fd724a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.243939] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1414.303922] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1414.527960] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Updating instance_info_cache with network_info: [{"id": "bc5682c3-9b92-4102-b0e5-d5bfd025e2bd", "address": "fa:16:3e:24:44:8c", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc5682c3-9b", "ovs_interfaceid": "bc5682c3-9b92-4102-b0e5-d5bfd025e2bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.547160] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "refresh_cache-139391d4-af04-4053-801a-792fc4fd724a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.547486] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Instance network_info: |[{"id": "bc5682c3-9b92-4102-b0e5-d5bfd025e2bd", "address": "fa:16:3e:24:44:8c", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc5682c3-9b", "ovs_interfaceid": "bc5682c3-9b92-4102-b0e5-d5bfd025e2bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1414.547918] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:44:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24376631-ee89-4ff1-b8ac-f09911fc8329', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc5682c3-9b92-4102-b0e5-d5bfd025e2bd', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1414.556483] env[62476]: DEBUG oslo.service.loopingcall [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.557119] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1414.557427] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c985b0af-5e0d-43c2-9ee4-564573114c09 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.579201] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1414.579201] env[62476]: value = "task-4319125" [ 1414.579201] env[62476]: _type = "Task" [ 1414.579201] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.588254] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319125, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.686398] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.686587] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1414.686711] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1414.711446] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.711718] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.711943] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.712046] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.712193] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.712364] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.712477] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.712699] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.712822] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.712909] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1414.713020] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1415.091699] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319125, 'name': CreateVM_Task, 'duration_secs': 0.309241} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.091864] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1415.092582] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.092741] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.093087] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1415.093390] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a97dedaf-d481-4598-9c88-d971c5197247 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.098820] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1415.098820] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52807f83-5101-ecef-be89-3cd00d702c00" [ 1415.098820] env[62476]: _type = "Task" [ 1415.098820] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.108248] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52807f83-5101-ecef-be89-3cd00d702c00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.610206] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.610591] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.610694] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.027278] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.180832] env[62476]: DEBUG nova.compute.manager [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Received event network-changed-bc5682c3-9b92-4102-b0e5-d5bfd025e2bd {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1416.181041] env[62476]: DEBUG nova.compute.manager [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Refreshing instance network info cache due to event network-changed-bc5682c3-9b92-4102-b0e5-d5bfd025e2bd. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1416.181258] env[62476]: DEBUG oslo_concurrency.lockutils [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] Acquiring lock "refresh_cache-139391d4-af04-4053-801a-792fc4fd724a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.181404] env[62476]: DEBUG oslo_concurrency.lockutils [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] Acquired lock "refresh_cache-139391d4-af04-4053-801a-792fc4fd724a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.181560] env[62476]: DEBUG nova.network.neutron [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Refreshing network info cache for port bc5682c3-9b92-4102-b0e5-d5bfd025e2bd {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1416.503960] env[62476]: DEBUG nova.network.neutron [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Updated VIF entry in instance network info cache for port bc5682c3-9b92-4102-b0e5-d5bfd025e2bd. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1416.504428] env[62476]: DEBUG nova.network.neutron [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Updating instance_info_cache with network_info: [{"id": "bc5682c3-9b92-4102-b0e5-d5bfd025e2bd", "address": "fa:16:3e:24:44:8c", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc5682c3-9b", "ovs_interfaceid": "bc5682c3-9b92-4102-b0e5-d5bfd025e2bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.515117] env[62476]: DEBUG oslo_concurrency.lockutils [req-d0e7e21d-0f61-485d-a9ff-1694ba852810 req-4ab2a3b7-fab8-4166-9a2b-10fdc0317854 service nova] Releasing lock "refresh_cache-139391d4-af04-4053-801a-792fc4fd724a" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.027072] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.027409] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1417.027589] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1420.027343] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1420.027689] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.027327] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.022103] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.947938] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "139391d4-af04-4053-801a-792fc4fd724a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.022092] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.483767] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "c7e551af-a94e-48da-a725-53ebd73d43ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.484251] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1457.726317] env[62476]: WARNING oslo_vmware.rw_handles [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1457.726317] env[62476]: ERROR oslo_vmware.rw_handles [ 1457.726990] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1457.729601] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1457.729961] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Copying Virtual Disk [datastore1] vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/0e65f550-c280-42ed-ac72-14355d54703b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1457.730393] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bc69fbd-3838-40bd-a369-4f93d731415a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.741192] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1457.741192] env[62476]: value = "task-4319126" [ 
1457.741192] env[62476]: _type = "Task" [ 1457.741192] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.749956] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.252214] env[62476]: DEBUG oslo_vmware.exceptions [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1458.252516] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.253092] env[62476]: ERROR nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1458.253092] env[62476]: Faults: ['InvalidArgument'] [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Traceback (most recent call last): [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] yield resources [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self.driver.spawn(context, instance, image_meta, [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self._fetch_image_if_missing(context, vi) [ 1458.253092] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] image_cache(vi, 
tmp_image_ds_loc) [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] vm_util.copy_virtual_disk( [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] session._wait_for_task(vmdk_copy_task) [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] return self.wait_for_task(task_ref) [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] return evt.wait() [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] result = hub.switch() [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1458.253497] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] return self.greenlet.switch() [ 1458.253916] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1458.253916] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self.f(*self.args, **self.kw) [ 1458.253916] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1458.253916] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] raise exceptions.translate_fault(task_info.error) [ 1458.253916] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1458.253916] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Faults: ['InvalidArgument'] [ 1458.253916] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] [ 1458.253916] env[62476]: INFO nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Terminating instance [ 1458.255041] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 
tempest-MultipleCreateTestJSON-1551640296-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.255260] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1458.255511] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ca0dc15-e63c-4206-98b7-0d05d6ffe48d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.257965] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1458.258182] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1458.259099] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3a9dff-b6e9-44d1-a73d-092785232e72 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.266207] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1458.266439] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a5c0ef7-a3b3-4670-941c-bcfe5839f7e0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.269024] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1458.269221] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1458.270243] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d71f2ef-8a7c-44e5-a5a7-88892ef98028 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.275443] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Waiting for the task: (returnval){ [ 1458.275443] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]529b7e78-0acb-d43c-9070-2de503291f89" [ 1458.275443] env[62476]: _type = "Task" [ 1458.275443] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.291207] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1458.291411] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Creating directory with path [datastore1] vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1458.291640] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a5b38a0-2534-4c5d-a95f-b749af03d436 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.315721] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Created directory with path [datastore1] vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1458.315721] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Fetch image to [datastore1] vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1458.315721] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1458.316633] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d09cc3-43a4-4aa4-aead-eefe32f733b7 
{{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.324375] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dfaa04-42bf-42a3-b6a7-1f7323c26eee {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.334308] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6ddec4-8199-46ca-a070-e37d1e6488b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.366318] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e294bccf-6420-4155-94c6-73355356182a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.369141] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1458.369337] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1458.369509] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleting the datastore file [datastore1] 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1458.369794] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b496b6d-9df7-452f-addb-a967e49c6843 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.375297] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e52f5bfc-d9f9-4ca8-a307-87b605107922 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.378219] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1458.378219] env[62476]: value = "task-4319128" [ 1458.378219] env[62476]: _type = "Task" [ 1458.378219] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.385923] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319128, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.400113] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1458.580759] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1458.641721] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1458.641994] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1458.889508] env[62476]: DEBUG oslo_vmware.api [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078071} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.889804] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1458.889981] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1458.890136] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1458.890316] env[62476]: INFO nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1458.892427] env[62476]: DEBUG nova.compute.claims [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1458.892607] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.892819] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.157549] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbe83b5-efa0-4ce6-a728-b7fe829e43f5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.165651] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f69be0e-d3f0-4071-afa0-3bd35f6c6818 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.196891] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e21ff6c-023f-4157-b3a7-59d376234c68 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.205018] env[62476]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2467dd4e-ffbd-4b1f-955c-b46808bf9476 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.219357] env[62476]: DEBUG nova.compute.provider_tree [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.228279] env[62476]: DEBUG nova.scheduler.client.report [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1459.245593] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.353s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.246153] env[62476]: ERROR nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1459.246153] env[62476]: Faults: ['InvalidArgument'] [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Traceback (most recent call last): [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self.driver.spawn(context, instance, image_meta, [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self._fetch_image_if_missing(context, vi) [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] image_cache(vi, tmp_image_ds_loc) [ 1459.246153] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] vm_util.copy_virtual_disk( [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] session._wait_for_task(vmdk_copy_task) [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] return self.wait_for_task(task_ref) [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] return evt.wait() [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] result = hub.switch() [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] return self.greenlet.switch() [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1459.246499] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] self.f(*self.args, **self.kw) [ 1459.246892] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1459.246892] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] raise exceptions.translate_fault(task_info.error) [ 1459.246892] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1459.246892] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Faults: ['InvalidArgument'] [ 1459.246892] env[62476]: ERROR nova.compute.manager [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] [ 1459.246892] env[62476]: DEBUG nova.compute.utils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] VimFaultException {{(pid=62476) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1459.248411] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Build of instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 was re-scheduled: A specified parameter was not correct: fileType [ 1459.248411] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1459.248811] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1459.248982] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1459.249170] env[62476]: DEBUG nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1459.249332] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1459.550145] env[62476]: DEBUG nova.network.neutron [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.562264] env[62476]: INFO nova.compute.manager [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Took 0.31 seconds to deallocate network for instance. 
[ 1459.673683] env[62476]: INFO nova.scheduler.client.report [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleted allocations for instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 [ 1459.694689] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab0a502c-7d57-419a-8f5d-88df5d191cc3 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.755s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.695927] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.914s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.696194] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.696411] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.696610] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.699297] env[62476]: INFO nova.compute.manager [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Terminating instance [ 1459.701393] env[62476]: DEBUG nova.compute.manager [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1459.701589] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1459.702090] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-737a96bf-b2b3-4668-bc26-c28b3890e4dd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.706857] env[62476]: DEBUG nova.compute.manager [None req-2d57938c-8622-4a47-8f6d-5e247a1ec9d5 tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] [instance: 0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1459.713297] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8357e7-f38c-476f-b340-6be00a1144af {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.733531] env[62476]: DEBUG nova.compute.manager [None req-2d57938c-8622-4a47-8f6d-5e247a1ec9d5 tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] [instance: 0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1459.744404] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87f2ddc2-11d2-49de-a3de-9e7082ab88c4 could not be found. [ 1459.744665] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1459.744880] env[62476]: INFO nova.compute.manager [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1459.745196] env[62476]: DEBUG oslo.service.loopingcall [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.747956] env[62476]: DEBUG nova.compute.manager [-] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1459.748094] env[62476]: DEBUG nova.network.neutron [-] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1459.758544] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2d57938c-8622-4a47-8f6d-5e247a1ec9d5 tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Lock "0d9c9bf8-5fd9-4d26-8945-0c8a1adb230f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.121s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.769551] env[62476]: DEBUG nova.compute.manager [None req-5f7f8ae1-8749-4db3-9638-cd56cee92e2a tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] [instance: 5c5fe542-5362-4fe9-a359-ea3eac825ca0] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1459.775282] env[62476]: DEBUG nova.network.neutron [-] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.782988] env[62476]: INFO nova.compute.manager [-] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] Took 0.03 seconds to deallocate network for instance. [ 1459.800956] env[62476]: DEBUG nova.compute.manager [None req-5f7f8ae1-8749-4db3-9638-cd56cee92e2a tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] [instance: 5c5fe542-5362-4fe9-a359-ea3eac825ca0] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1459.825187] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5f7f8ae1-8749-4db3-9638-cd56cee92e2a tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Lock "5c5fe542-5362-4fe9-a359-ea3eac825ca0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.530s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.835819] env[62476]: DEBUG nova.compute.manager [None req-5be2cd61-1b12-4e36-ae96-1ce6b355ecda tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] [instance: b304faf2-127c-4185-89c4-84093c81cf6b] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1459.859711] env[62476]: DEBUG nova.compute.manager [None req-5be2cd61-1b12-4e36-ae96-1ce6b355ecda tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] [instance: b304faf2-127c-4185-89c4-84093c81cf6b] Instance disappeared before build. 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1459.886173] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8808f1c-cb73-4248-836f-3567c7ccc51c tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.887127] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 253.451s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.887327] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 87f2ddc2-11d2-49de-a3de-9e7082ab88c4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1459.887499] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "87f2ddc2-11d2-49de-a3de-9e7082ab88c4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.891821] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5be2cd61-1b12-4e36-ae96-1ce6b355ecda tempest-ListServerFiltersTestJSON-82712899 tempest-ListServerFiltersTestJSON-82712899-project-member] Lock "b304faf2-127c-4185-89c4-84093c81cf6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.196s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.900252] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1459.974426] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.974676] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.976125] env[62476]: INFO nova.compute.claims [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1460.256851] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b596eda8-089c-47e3-a1d3-fea80d181653 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.265113] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56afda62-eea6-419d-8843-880b8aeccbc8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.296898] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d16f593-69b0-4187-a283-874d5fff1a30 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.305201] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba50b4d-a9f0-4478-8124-283f6d63e110 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.320485] env[62476]: DEBUG nova.compute.provider_tree [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.330362] env[62476]: DEBUG nova.scheduler.client.report [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1460.347368] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b 
tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.372s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.348507] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1460.384663] env[62476]: DEBUG nova.compute.utils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1460.385960] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Not allocating networking since 'none' was specified. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1460.398717] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1460.484427] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1460.509772] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1460.510056] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1460.510219] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1460.510400] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1460.510548] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1460.510698] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1460.511097] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1460.511097] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1460.511266] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b 
tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1460.511429] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1460.511606] env[62476]: DEBUG nova.virt.hardware [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1460.512545] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b507c57f-6a52-498d-a063-e23b135e6683 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.520376] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f177cf-3d58-45b0-82be-b98e67470a10 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.534782] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance VIF info [] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1460.540219] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Creating folder: Project (522c2a4690924051bd1a394c7b642cc9). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1460.540468] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-873040a2-fb4f-45bb-ac11-91b9c0bc93e1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.550222] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Created folder: Project (522c2a4690924051bd1a394c7b642cc9) in parent group-v849485. [ 1460.550407] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Creating folder: Instances. Parent ref: group-v849557. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1460.550628] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bb32346-3b1b-4384-a6ab-7d1d3b5f2542 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.559871] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Created folder: Instances in parent group-v849557. 
[ 1460.560120] env[62476]: DEBUG oslo.service.loopingcall [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.560310] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1460.560511] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e09ccf71-c433-465e-beba-64f5b01e4c0b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.576882] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1460.576882] env[62476]: value = "task-4319131" [ 1460.576882] env[62476]: _type = "Task" [ 1460.576882] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.584212] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319131, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.088263] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319131, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.587522] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319131, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.087737] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319131, 'name': CreateVM_Task, 'duration_secs': 1.292839} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.087895] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1462.088320] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.088576] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.088835] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1462.089125] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eb93586-059d-41e0-8af1-a99317ae9e5f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.093759] env[62476]: DEBUG oslo_vmware.api [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Waiting for the task: (returnval){ [ 1462.093759] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52ff306e-0835-e460-d5fa-4a021bc912f4" [ 1462.093759] env[62476]: _type = "Task" [ 1462.093759] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.102119] env[62476]: DEBUG oslo_vmware.api [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52ff306e-0835-e460-d5fa-4a021bc912f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.604103] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.604286] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.604491] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.626538] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "ea606214-a34b-4972-8948-a6ff8c55b889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.626538] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "ea606214-a34b-4972-8948-a6ff8c55b889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.340157] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "003e332b-9765-4db7-9f48-40d33c6532d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.027049] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.039352] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.039577] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 
0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.039753] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.039926] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1473.041156] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3538ec3d-4a0b-4ca7-8c18-59d95f822c22 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.050188] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a58603c-796c-46fc-9627-6edd288cfa76 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.065455] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed66bc6-056f-45c5-a746-a2f6469faa06 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.071975] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659bb071-032f-4ca3-b777-915e665df74a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.100369] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180719MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1473.100522] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.100705] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.246849] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance eca46087-33a7-4e9d-a7ce-6094886704a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247032] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247167] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247344] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247472] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247592] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247709] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247829] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.247945] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.248067] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1473.259160] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6a5ba30d-8a44-49bb-b061-fadd99dc4d4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.270019] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.280052] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance f082523d-622b-4d64-b15f-a8511261f4b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.289843] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.299486] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.309909] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1473.310377] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1473.310377] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1473.327394] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing inventories for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1473.341675] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating ProviderTree inventory for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1473.341862] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1473.353613] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing aggregate associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, aggregates: None {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1473.372154] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing trait associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1473.570129] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bbeca2-25a4-49a7-80ea-3107ca17ea38 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.578010] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-77c793a4-680e-4f67-846d-40b6b6459437 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.608522] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964203d0-02fc-4cef-9de9-e005dcb1afce {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.616968] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bac2284-4c6f-4989-a0bf-7e0106ffbe33 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.630621] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.640638] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.654565] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1473.654754] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.554s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.027656] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.027910] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances with incomplete migration {{(pid=62476) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1476.037584] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.037992] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1476.037992] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the 
list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1476.060624] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.060806] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.060907] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061054] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061185] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061365] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061421] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061538] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061656] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061772] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1476.061893] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1477.026946] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.026687] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.027274] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.027274] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1480.027480] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1481.027765] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1481.028038] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.038623] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.022897] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1490.027487] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1490.027848] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1490.038779] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] There are 0 instances to clean {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1504.529667] env[62476]: WARNING oslo_vmware.rw_handles [None 
req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1504.529667] env[62476]: ERROR oslo_vmware.rw_handles [ 1504.530258] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1504.532047] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1504.532288] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Copying Virtual Disk [datastore1] vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/b51e9912-b592-4d68-ab75-48bca46b23fb/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1504.532566] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82cc59fc-2e08-4f57-b255-74f649f667b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.541549] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Waiting for the task: (returnval){ [ 1504.541549] env[62476]: value = "task-4319132" [ 1504.541549] env[62476]: _type = "Task" [ 1504.541549] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.550855] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Task: {'id': task-4319132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.051772] env[62476]: DEBUG oslo_vmware.exceptions [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1505.052058] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.052638] env[62476]: ERROR nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1505.052638] env[62476]: Faults: ['InvalidArgument'] [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Traceback (most recent call last): [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] yield resources [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self.driver.spawn(context, instance, image_meta, [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self._fetch_image_if_missing(context, vi) [ 1505.052638] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] image_cache(vi, tmp_image_ds_loc) [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: 
eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] vm_util.copy_virtual_disk( [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] session._wait_for_task(vmdk_copy_task) [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] return self.wait_for_task(task_ref) [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] return evt.wait() [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] result = hub.switch() [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1505.053082] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] return self.greenlet.switch() [ 1505.053472] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1505.053472] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self.f(*self.args, **self.kw) [ 1505.053472] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1505.053472] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] raise exceptions.translate_fault(task_info.error) [ 1505.053472] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1505.053472] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Faults: ['InvalidArgument'] [ 1505.053472] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] [ 1505.053472] env[62476]: INFO nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Terminating instance [ 1505.054524] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.054735] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1505.054974] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f78a09c-8642-4b31-80d3-67197ce4c1b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.057299] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1505.057489] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1505.058217] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02c9ce5-922c-494a-8995-b6a48bb96710 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.065237] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1505.065475] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d2c77d2-f3f5-4563-a077-c3733ad24be5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.067739] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1505.067911] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1505.068898] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb423439-ffeb-4b87-9836-5f60be148c35 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.074076] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Waiting for the task: (returnval){ [ 1505.074076] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52e815ca-ee81-9aab-fa92-5397cdfe0f7a" [ 1505.074076] env[62476]: _type = "Task" [ 1505.074076] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.081617] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52e815ca-ee81-9aab-fa92-5397cdfe0f7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.585062] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1505.585062] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Creating directory with path [datastore1] vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1505.585062] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d11c286-f5a0-40ce-bdc7-cc885d4c3b3c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.604847] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Created directory with path [datastore1] vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1505.605054] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Fetch image to [datastore1] vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1505.605227] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Downloading image file 
data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1505.605994] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fbae07-64be-4a5d-9849-25d949b843b2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.613269] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ba5c90-0afd-43c9-8e84-a9af738e0b2c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.622526] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc0d4b9-18d6-4def-b24e-1e1fe9710f39 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.652418] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3b5475-322f-4d84-88a5-c05fa99067c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.658433] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1a8ce3e8-812f-446a-9404-4505e0e1b918 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.679309] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1505.731925] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1505.792929] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1505.794033] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1506.275978] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1506.276235] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1506.276424] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Deleting the datastore file [datastore1] eca46087-33a7-4e9d-a7ce-6094886704a1 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1506.276740] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbadc921-78df-48c2-802f-a162db61ab03 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.285581] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Waiting for the task: (returnval){ [ 1506.285581] env[62476]: value = "task-4319134" [ 1506.285581] env[62476]: _type = "Task" [ 1506.285581] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.297521] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Task: {'id': task-4319134, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.796648] env[62476]: DEBUG oslo_vmware.api [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Task: {'id': task-4319134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077083} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.797050] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1506.797100] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1506.797252] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1506.797428] env[62476]: INFO nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1506.799730] env[62476]: DEBUG nova.compute.claims [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1506.799901] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.800162] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.080859] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031825fc-72be-4011-987f-cd44d42d5383 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.088756] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2140f71-cc03-432a-aa2d-baeb5f6199ae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.124057] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7a58bb-0647-4f77-a86e-e38d86972470 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.132193] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce8f5d8-31ab-46ce-84f2-3b7350935295 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.145963] env[62476]: DEBUG nova.compute.provider_tree [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.154881] env[62476]: DEBUG nova.scheduler.client.report [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1507.171467] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.371s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.172014] env[62476]: ERROR nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1507.172014] env[62476]: Faults: ['InvalidArgument'] [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Traceback (most recent call last): [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self.driver.spawn(context, instance, image_meta, [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self._fetch_image_if_missing(context, vi) [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] image_cache(vi, tmp_image_ds_loc) [ 1507.172014] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] vm_util.copy_virtual_disk( [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] session._wait_for_task(vmdk_copy_task) [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] return self.wait_for_task(task_ref) [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] return evt.wait() [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] result = hub.switch() [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] return self.greenlet.switch() [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1507.172371] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] self.f(*self.args, **self.kw) [ 1507.172672] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1507.172672] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] raise exceptions.translate_fault(task_info.error) [ 1507.172672] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1507.172672] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Faults: ['InvalidArgument'] [ 1507.172672] env[62476]: ERROR nova.compute.manager [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] [ 1507.172787] env[62476]: DEBUG nova.compute.utils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] VimFaultException {{(pid=62476) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1507.174509] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Build of instance eca46087-33a7-4e9d-a7ce-6094886704a1 was re-scheduled: A specified parameter was not correct: fileType [ 1507.174509] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1507.174878] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1507.175063] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1507.175239] env[62476]: DEBUG nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1507.175405] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1507.513912] env[62476]: DEBUG nova.network.neutron [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.532020] env[62476]: INFO nova.compute.manager [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Took 0.35 seconds to deallocate network for instance. 
[ 1507.652018] env[62476]: INFO nova.scheduler.client.report [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Deleted allocations for instance eca46087-33a7-4e9d-a7ce-6094886704a1 [ 1507.695648] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ccd2a57-647d-4eef-afaf-a02eea54c72e tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.523s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.696881] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.193s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.697147] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "eca46087-33a7-4e9d-a7ce-6094886704a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.697483] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.698232] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.700557] env[62476]: INFO nova.compute.manager [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Terminating instance [ 1507.702319] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquiring lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.702488] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Acquired lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" 
{{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.702656] env[62476]: DEBUG nova.network.neutron [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1507.720498] env[62476]: DEBUG nova.compute.manager [None req-482d18b2-e12d-4680-b5e4-47b7b3d0eb36 tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: e95a41ff-af11-48ac-8245-c70eb0a73c7e] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1507.748878] env[62476]: DEBUG nova.compute.manager [None req-482d18b2-e12d-4680-b5e4-47b7b3d0eb36 tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: e95a41ff-af11-48ac-8245-c70eb0a73c7e] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1507.758668] env[62476]: DEBUG nova.network.neutron [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1507.772716] env[62476]: DEBUG oslo_concurrency.lockutils [None req-482d18b2-e12d-4680-b5e4-47b7b3d0eb36 tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "e95a41ff-af11-48ac-8245-c70eb0a73c7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.194s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.790081] env[62476]: DEBUG nova.compute.manager [None req-00a51f3a-7942-4732-b6bb-71bbe42cd8c2 tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] [instance: 6a5ba30d-8a44-49bb-b061-fadd99dc4d4e] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1507.818473] env[62476]: DEBUG nova.compute.manager [None req-00a51f3a-7942-4732-b6bb-71bbe42cd8c2 tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] [instance: 6a5ba30d-8a44-49bb-b061-fadd99dc4d4e] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1507.845461] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00a51f3a-7942-4732-b6bb-71bbe42cd8c2 tempest-AttachVolumeNegativeTest-424705412 tempest-AttachVolumeNegativeTest-424705412-project-member] Lock "6a5ba30d-8a44-49bb-b061-fadd99dc4d4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.803s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.855747] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1507.900611] env[62476]: DEBUG nova.network.neutron [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.913830] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Releasing lock "refresh_cache-eca46087-33a7-4e9d-a7ce-6094886704a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.914254] env[62476]: DEBUG nova.compute.manager [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1507.914451] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1507.914953] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab84496b-85a2-4b59-9e78-14a96f24c1ce {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.924805] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd09cad-9abe-418e-ad07-dcadcd0e3b8f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.936220] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.936450] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.937952] env[62476]: INFO nova.compute.claims [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1507.958924] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance does not exist on backend: 
nova.exception.InstanceNotFound: Instance eca46087-33a7-4e9d-a7ce-6094886704a1 could not be found. [ 1507.959131] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1507.959311] env[62476]: INFO nova.compute.manager [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1507.959563] env[62476]: DEBUG oslo.service.loopingcall [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.959788] env[62476]: DEBUG nova.compute.manager [-] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1507.959884] env[62476]: DEBUG nova.network.neutron [-] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1507.982129] env[62476]: DEBUG nova.network.neutron [-] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1507.991851] env[62476]: DEBUG nova.network.neutron [-] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.000372] env[62476]: INFO nova.compute.manager [-] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] Took 0.04 seconds to deallocate network for instance. [ 1508.100310] env[62476]: DEBUG oslo_concurrency.lockutils [None req-599adca5-eacf-4c95-97d1-25d2acc23e93 tempest-MultipleCreateTestJSON-1551640296 tempest-MultipleCreateTestJSON-1551640296-project-member] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.403s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.101419] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 301.665s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.101675] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: eca46087-33a7-4e9d-a7ce-6094886704a1] During sync_power_state the instance has a pending task (deleting). Skip.
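
The lock trace above is the standard oslo.concurrency instrumentation: every guarded callable logs an "Acquiring" line before it blocks, an "acquired :: waited Ns" line once it holds the lock, and a ""released" :: held Ns" line on exit. The .<locals>. segments in the owner names are simply the __qualname__ of the nested functions the decorator wraps. Below is a minimal sketch of that pattern using plain threading; it is illustrative only, not Nova's actual lockutils code, and the names in it are stand-ins:

    import threading
    import time
    from functools import wraps

    _locks = {}  # one named lock per resource, e.g. an instance UUID

    def synchronized(name):
        """Illustrative stand-in for the oslo.concurrency synchronized decorator."""
        lock = _locks.setdefault(name, threading.Lock())

        def decorator(fn):
            @wraps(fn)
            def inner(*args, **kwargs):
                # The qualname of a nested function contains ".<locals>.",
                # which is where the lock-owner names in the log come from.
                owner = f"{fn.__module__}.{fn.__qualname__}"
                print(f'Acquiring lock "{name}" by "{owner}"')
                t0 = time.monotonic()
                with lock:
                    print(f'Lock "{name}" acquired by "{owner}" '
                          f':: waited {time.monotonic() - t0:.3f}s')
                    t1 = time.monotonic()
                    try:
                        return fn(*args, **kwargs)
                    finally:
                        print(f'Lock "{name}" "released" by "{owner}" '
                              f':: held {time.monotonic() - t1:.3f}s')
            return inner
        return decorator

With a per-instance lock name, this reproduces the serialization visible above: the build path held the instance lock for 633.523s, so the terminate request for the same UUID queued behind it and logged "waited 435.193s" before its handler could run.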
[ 1508.101860] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "eca46087-33a7-4e9d-a7ce-6094886704a1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.212330] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fbe90a-67d2-4b5c-b186-86062ade8849 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.220876] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155a0a8f-539b-4704-b40c-65d9d6be5201 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.264274] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2276a187-33a3-4ea3-a8d3-2c6adb381ba3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.272558] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e273ae-ecef-4e15-b078-66f2ab4b8116 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.287198] env[62476]: DEBUG nova.compute.provider_tree [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.297552] env[62476]: DEBUG nova.scheduler.client.report [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1508.313814] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.377s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.314322] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Start building networks asynchronously for instance.
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1508.354513] env[62476]: DEBUG nova.compute.utils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1508.355967] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1508.356153] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1508.366164] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1508.464024] env[62476]: DEBUG nova.policy [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9281b2dcb9c0440495b676e3291d6d92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1bc32d84f43a439396eacf3e9da5ad7d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1508.476868] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1508.511526] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1508.511791] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1508.511949] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1508.512145] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1508.512294] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1508.512444] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1508.513175] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1508.513394] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1508.513577] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 
tempest-ServersTestJSON-1286035361-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1508.513774] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1508.513958] env[62476]: DEBUG nova.virt.hardware [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1508.515154] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8b164b-db49-456d-b328-a7ccf6c38320 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.524220] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7b1caf-7333-4390-9691-d262ffe1789d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.040788] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Successfully created port: a575c70b-23c1-4f51-8807-ac5b51ba4eb1 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1509.802854] env[62476]: DEBUG nova.compute.manager [req-439bc00b-9c20-4aab-8891-efbdfb1d5444 req-41e809d9-1403-4337-a50e-d25a45ac0ad1 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Received event network-vif-plugged-a575c70b-23c1-4f51-8807-ac5b51ba4eb1 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1509.803082] env[62476]: DEBUG oslo_concurrency.lockutils [req-439bc00b-9c20-4aab-8891-efbdfb1d5444 req-41e809d9-1403-4337-a50e-d25a45ac0ad1 service nova] Acquiring lock "27737774-efb5-4aee-a0c0-695e78a15dd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.803298] env[62476]: DEBUG oslo_concurrency.lockutils [req-439bc00b-9c20-4aab-8891-efbdfb1d5444 req-41e809d9-1403-4337-a50e-d25a45ac0ad1 service nova] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.803469] env[62476]: DEBUG oslo_concurrency.lockutils [req-439bc00b-9c20-4aab-8891-efbdfb1d5444 req-41e809d9-1403-4337-a50e-d25a45ac0ad1 service nova] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.803803] env[62476]: DEBUG nova.compute.manager [req-439bc00b-9c20-4aab-8891-efbdfb1d5444 req-41e809d9-1403-4337-a50e-d25a45ac0ad1 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6]
No waiting events found dispatching network-vif-plugged-a575c70b-23c1-4f51-8807-ac5b51ba4eb1 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1509.803803] env[62476]: WARNING nova.compute.manager [req-439bc00b-9c20-4aab-8891-efbdfb1d5444 req-41e809d9-1403-4337-a50e-d25a45ac0ad1 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Received unexpected event network-vif-plugged-a575c70b-23c1-4f51-8807-ac5b51ba4eb1 for instance with vm_state building and task_state spawning. [ 1509.868295] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Successfully updated port: a575c70b-23c1-4f51-8807-ac5b51ba4eb1 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1509.878866] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "refresh_cache-27737774-efb5-4aee-a0c0-695e78a15dd6" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.878954] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "refresh_cache-27737774-efb5-4aee-a0c0-695e78a15dd6" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.879150] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1509.942408] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1510.124386] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Updating instance_info_cache with network_info: [{"id": "a575c70b-23c1-4f51-8807-ac5b51ba4eb1", "address": "fa:16:3e:64:ce:a7", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa575c70b-23", "ovs_interfaceid": "a575c70b-23c1-4f51-8807-ac5b51ba4eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.139843] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "refresh_cache-27737774-efb5-4aee-a0c0-695e78a15dd6" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.140176] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Instance network_info: |[{"id": "a575c70b-23c1-4f51-8807-ac5b51ba4eb1", "address": "fa:16:3e:64:ce:a7", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa575c70b-23", "ovs_interfaceid": "a575c70b-23c1-4f51-8807-ac5b51ba4eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1510.140629] env[62476]: 
DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:ce:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a575c70b-23c1-4f51-8807-ac5b51ba4eb1', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1510.147949] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Creating folder: Project (1bc32d84f43a439396eacf3e9da5ad7d). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1510.148576] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f560a2e6-ba94-4c8f-b666-3168b41b3e1e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.160790] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Created folder: Project (1bc32d84f43a439396eacf3e9da5ad7d) in parent group-v849485. [ 1510.160979] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Creating folder: Instances. Parent ref: group-v849560. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1510.161237] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1240127a-fb1e-44a2-94c4-2194255e5b1a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.171056] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Created folder: Instances in parent group-v849560. [ 1510.171317] env[62476]: DEBUG oslo.service.loopingcall [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.171512] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1510.171725] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26cfc501-47f5-43c7-87c1-9eecfce98edd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.192932] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1510.192932] env[62476]: value = "task-4319137" [ 1510.192932] env[62476]: _type = "Task" [ 1510.192932] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.200855] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319137, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.702456] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319137, 'name': CreateVM_Task, 'duration_secs': 0.337707} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.702456] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1510.703057] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.703231] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.703545] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1510.703799] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19db994b-7132-40a1-b140-6f017f9825b3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.708643] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 1510.708643] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]527df08b-0d90-4cc0-8821-d4574f29de9a" [ 1510.708643] env[62476]: _type = "Task" [ 1510.708643] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.717159] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]527df08b-0d90-4cc0-8821-d4574f29de9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.219466] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.219847] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1511.219957] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.833075] env[62476]: DEBUG nova.compute.manager [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Received event network-changed-a575c70b-23c1-4f51-8807-ac5b51ba4eb1 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1511.833290] env[62476]: DEBUG nova.compute.manager [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Refreshing instance network info cache due to event network-changed-a575c70b-23c1-4f51-8807-ac5b51ba4eb1. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1511.833503] env[62476]: DEBUG oslo_concurrency.lockutils [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] Acquiring lock "refresh_cache-27737774-efb5-4aee-a0c0-695e78a15dd6" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.833647] env[62476]: DEBUG oslo_concurrency.lockutils [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] Acquired lock "refresh_cache-27737774-efb5-4aee-a0c0-695e78a15dd6" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.833805] env[62476]: DEBUG nova.network.neutron [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Refreshing network info cache for port a575c70b-23c1-4f51-8807-ac5b51ba4eb1 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1512.150108] env[62476]: DEBUG nova.network.neutron [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Updated VIF entry in instance network info cache for port a575c70b-23c1-4f51-8807-ac5b51ba4eb1. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1512.150494] env[62476]: DEBUG nova.network.neutron [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Updating instance_info_cache with network_info: [{"id": "a575c70b-23c1-4f51-8807-ac5b51ba4eb1", "address": "fa:16:3e:64:ce:a7", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa575c70b-23", "ovs_interfaceid": "a575c70b-23c1-4f51-8807-ac5b51ba4eb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.161646] env[62476]: DEBUG oslo_concurrency.lockutils [req-5496632a-e490-4504-b588-850dec273917 req-1f610668-b752-49a1-bc39-d27e0cf70f06 service nova] Releasing lock "refresh_cache-27737774-efb5-4aee-a0c0-695e78a15dd6" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.088757] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.088757] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.145710] env[62476]: DEBUG oslo_concurrency.lockutils [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "27737774-efb5-4aee-a0c0-695e78a15dd6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.038653] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.050017] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.050293] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.050669] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.050873] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1534.052084] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808b21b4-429b-4608-aeef-ccd0eac7b732 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.061867] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15eaa840-91ad-4e8b-9d3c-42846417bf2d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.078282] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef931450-306b-4053-b5a8-d647e90723d3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.087622] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1b35ae-1ffd-4b17-adf2-f62eb2d44ecf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.120532] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180693MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1534.120709] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.120918] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.238261] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238261] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238261] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238261] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238390] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238390] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238390] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238390] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238496] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.238496] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1534.258434] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1534.270308] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1534.280648] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1534.293881] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1534.293881] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1534.293881] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1534.523027] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26629273-022c-4860-b21b-e03cd209d514 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.530557] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128a22ec-09c6-4b41-828d-239e41e427f1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.565561] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91adbed-b1d7-401b-b3d6-27248db97dcb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.575071] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24314029-206a-4365-b95e-f99a025a4306 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.589940] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.599333] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1534.615755] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1534.616249] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.495s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.000922] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-6f3e1d22-462b-4879-b808-2e751082a43e tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] Acquiring lock "bd7629be-54eb-4a22-a601-8ffa8ec5d4f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.001260] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6f3e1d22-462b-4879-b808-2e751082a43e tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] Lock "bd7629be-54eb-4a22-a601-8ffa8ec5d4f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.604319] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.604687] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1537.604687] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1537.627648] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.627813] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.627944] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.628081] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.628218] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.628414] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Skipping network cache update for instance because it is Building.
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.628628] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.628805] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.628992] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.629197] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1537.629353] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1538.026698] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.026911] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1538.027027] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1539.027088] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1541.028338] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.027383] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1544.024228] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1545.027644] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.023592] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1553.028615] env[62476]: WARNING oslo_vmware.rw_handles [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1553.028615] env[62476]: ERROR oslo_vmware.rw_handles [ 1553.029361] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 
tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1553.031565] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1553.031816] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Copying Virtual Disk [datastore1] vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/1f61c7b0-b74d-4ad4-987a-2393b20f8c50/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1553.032126] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1bda499-f97c-466a-8063-8484a7228a08 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.041211] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Waiting for the task: (returnval){ [ 1553.041211] env[62476]: value = "task-4319138" [ 1553.041211] env[62476]: _type = "Task" [ 1553.041211] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.049827] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Task: {'id': task-4319138, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.551690] env[62476]: DEBUG oslo_vmware.exceptions [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1553.551984] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.552696] env[62476]: ERROR nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1553.552696] env[62476]: Faults: ['InvalidArgument'] [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Traceback (most recent call last): [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] yield resources [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self.driver.spawn(context, instance, image_meta, [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self._fetch_image_if_missing(context, vi) [ 1553.552696] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] image_cache(vi, tmp_image_ds_loc) [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] vm_util.copy_virtual_disk( [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] session._wait_for_task(vmdk_copy_task) [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] return self.wait_for_task(task_ref) [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] return evt.wait() [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] result = hub.switch() [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1553.553097] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] return self.greenlet.switch() [ 1553.553520] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1553.553520] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self.f(*self.args, **self.kw) [ 1553.553520] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1553.553520] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] raise exceptions.translate_fault(task_info.error) [ 1553.553520] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1553.553520] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Faults: ['InvalidArgument'] [ 1553.553520] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] [ 1553.553520] env[62476]: INFO nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Terminating instance [ 1553.554622] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.554858] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1553.555123] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c30b7009-0360-47f8-8fa9-6e599477b244 
{{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.558741] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1553.558979] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1553.559760] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09171d9-2493-455d-ad66-2af2182afb69 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.563910] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1553.564101] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1553.566559] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b17ba36-73f5-4218-84cc-f60b47ee43ac {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.568712] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1553.568934] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-182e3ee1-aab3-49b2-9ab5-00b998a07da7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.573010] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Waiting for the task: (returnval){ [ 1553.573010] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52b6509a-1cd3-2f64-f3ca-a4bb8eb0b581" [ 1553.573010] env[62476]: _type = "Task" [ 1553.573010] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.580884] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52b6509a-1cd3-2f64-f3ca-a4bb8eb0b581, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.638378] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1553.638596] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1553.638791] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Deleting the datastore file [datastore1] 3cdef023-ce78-4c3b-8476-5508c18204c2 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1553.639068] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-461d541e-ee1e-4612-be20-fdb3c6cf0653 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.645811] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Waiting for the task: (returnval){ [ 1553.645811] env[62476]: value = "task-4319140" [ 1553.645811] env[62476]: _type = "Task" [ 1553.645811] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.654519] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Task: {'id': task-4319140, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.083483] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1554.083780] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Creating directory with path [datastore1] vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.083983] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a6f377d-7a5f-4999-8b5e-0c4db720d566 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.095645] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Created directory with path [datastore1] vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.095852] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Fetch image to [datastore1] vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1554.096033] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1554.096785] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5997aef-4335-4f48-b873-d9fc547df3a5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.103971] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81d825d-7613-489d-8a75-12f597033d30 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.114160] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62958c02-9991-4d0c-bcab-79e5aac30add {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.144636] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b0fdd1-e138-4f8b-8c2f-ffd22a8f6151 
{{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.156009] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cd6a2fac-e8f7-4ad0-a6db-2afd57cbe30d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.157761] env[62476]: DEBUG oslo_vmware.api [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Task: {'id': task-4319140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077017} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.158016] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1554.158220] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1554.158391] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1554.158562] env[62476]: INFO nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Took 0.60 seconds to destroy the instance on the hypervisor. 
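(Editor's note: the traceback above ends in oslo.vmware's task poller turning the failed vCenter task into a raised exception, via "raise exceptions.translate_fault(task_info.error)". A minimal sketch of that wait-for-task pattern, assuming a hypothetical get_task_info() helper in place of the real PropertyCollector call; only the control flow is meant to match the log, not the actual oslo.vmware implementation:

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll the task until it reaches a terminal state; the
        # "progress is 0%." entries above are logged between polls.
        while True:
            info = get_task_info()  # hypothetical helper returning a dict
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # Corresponds to "raise exceptions.translate_fault(...)"
                # in the traceback above.
                raise VimFaultException(info.get('faults', []),
                                        info.get('message', 'task failed'))
            time.sleep(poll_interval)

The real poller runs inside an eventlet looping call rather than time.sleep, which is why eventlet frames appear in the traceback.)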
[ 1554.161150] env[62476]: DEBUG nova.compute.claims [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1554.161326] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1554.161540] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1554.180311] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1554.239636] env[62476]: DEBUG oslo_vmware.rw_handles [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1554.298487] env[62476]: DEBUG oslo_vmware.rw_handles [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1554.298680] env[62476]: DEBUG oslo_vmware.rw_handles [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1554.459760] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa623986-e745-478b-b51a-0224f1d631d0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1554.468070] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ab0580-9bc8-464a-9bfe-b031248f2b6d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1554.498860] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64ba9d2-4780-41c9-ba9c-7c3c002eb604 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1554.506557] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74feaa52-fde5-4abc-a3cf-7f911111d0f1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1554.519999] env[62476]: DEBUG nova.compute.provider_tree [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1554.528739] env[62476]: DEBUG nova.scheduler.client.report [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1554.543183] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.381s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1554.543689] env[62476]: ERROR nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1554.543689] env[62476]: Faults: ['InvalidArgument']
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Traceback (most recent call last):
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self.driver.spawn(context, instance, image_meta,
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self._fetch_image_if_missing(context, vi)
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] image_cache(vi, tmp_image_ds_loc)
[ 1554.543689] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] vm_util.copy_virtual_disk(
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] session._wait_for_task(vmdk_copy_task)
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] return self.wait_for_task(task_ref)
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] return evt.wait()
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] result = hub.switch()
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] return self.greenlet.switch()
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1554.544020] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] self.f(*self.args, **self.kw)
[ 1554.544362] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1554.544362] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] raise exceptions.translate_fault(task_info.error)
[ 1554.544362] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1554.544362] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Faults: ['InvalidArgument']
[ 1554.544362] env[62476]: ERROR nova.compute.manager [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2]
[ 1554.544504] env[62476]: DEBUG nova.compute.utils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1554.546314] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Build of instance 3cdef023-ce78-4c3b-8476-5508c18204c2 was re-scheduled: A specified parameter was not correct: fileType
[ 1554.546314] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1554.546696] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1554.546873] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1554.547062] env[62476]: DEBUG nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1554.547231] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1554.962444] env[62476]: DEBUG nova.network.neutron [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1554.973995] env[62476]: INFO nova.compute.manager [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Took 0.43 seconds to deallocate network for instance.
[ 1555.079258] env[62476]: INFO nova.scheduler.client.report [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Deleted allocations for instance 3cdef023-ce78-4c3b-8476-5508c18204c2
[ 1555.102262] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ebcff62e-2420-4d6a-a25b-1b3ae5c0c3a7 tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 573.948s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1555.103567] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 378.452s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1555.103806] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Acquiring lock "3cdef023-ce78-4c3b-8476-5508c18204c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1555.104030] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1555.104203] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1555.106542] env[62476]: INFO nova.compute.manager [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Terminating instance
[ 1555.108377] env[62476]: DEBUG nova.compute.manager [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1555.108571] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1555.109095] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-090eb524-3c12-400e-a4be-1ca95fcd5184 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1555.115477] env[62476]: DEBUG nova.compute.manager [None req-d92df672-b8eb-4c46-83ec-f3bd473e0f9a tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] [instance: f082523d-622b-4d64-b15f-a8511261f4b8] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1555.122486] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220790f8-e85b-49fd-919f-eef404fdbd3a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1555.142921] env[62476]: DEBUG nova.compute.manager [None req-d92df672-b8eb-4c46-83ec-f3bd473e0f9a tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] [instance: f082523d-622b-4d64-b15f-a8511261f4b8] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1555.154179] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3cdef023-ce78-4c3b-8476-5508c18204c2 could not be found.
[ 1555.154420] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1555.154604] env[62476]: INFO nova.compute.manager [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1555.154857] env[62476]: DEBUG oslo.service.loopingcall [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1555.155333] env[62476]: DEBUG nova.compute.manager [-] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1555.155440] env[62476]: DEBUG nova.network.neutron [-] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1555.177690] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d92df672-b8eb-4c46-83ec-f3bd473e0f9a tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] Lock "f082523d-622b-4d64-b15f-a8511261f4b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.046s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1555.183973] env[62476]: DEBUG nova.network.neutron [-] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1555.189133] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1555.192382] env[62476]: INFO nova.compute.manager [-] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] Took 0.04 seconds to deallocate network for instance.
[ 1555.273249] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1555.273513] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1555.275174] env[62476]: INFO nova.compute.claims [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1555.295652] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ef4a9394-5715-4efc-9088-813cea61316e tempest-ServerActionsTestOtherB-1300393245 tempest-ServerActionsTestOtherB-1300393245-project-member] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.192s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1555.297059] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 348.861s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1555.297059] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3cdef023-ce78-4c3b-8476-5508c18204c2] During sync_power_state the instance has a pending task (deleting). Skip.
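(Editor's note: the "Acquiring lock" / "acquired :: waited" / "released :: held" triplets around "compute_resources" above are emitted by oslo.concurrency's lock wrapper, which serializes the decorated calls behind a named semaphore and times how long each caller waited for and held it. A minimal sketch of the same pattern using the public lockutils API; the function name and body are illustrative, not Nova's actual resource tracker code:

    from oslo_concurrency import lockutils

    # All claim bookkeeping funnels through one named lock, so a claim,
    # a claim abort, and a power-state sync cannot interleave.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, memory_mb, vcpus):
        # Placeholder body: check free capacity, then record the claim.
        return {'instance': instance_uuid,
                'memory_mb': memory_mb,
                'vcpus': vcpus}

    if __name__ == '__main__':
        print(instance_claim('fe895d70-4c56-4854-83bf-a66cc1623d59', 128, 1))

The waited/held durations in the log come from this wrapper timing the acquire and release, which is how long waits such as the 348.861s one above can be read directly out of the log.)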
[ 1555.297059] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "3cdef023-ce78-4c3b-8476-5508c18204c2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1555.511441] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e0607f-c039-4434-8fe7-73157fa07f5a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1555.519450] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736faf93-cb81-4e7b-8752-84a02caeae60 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1555.548796] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405488d7-e6bc-49be-a936-0e7a83b81cf2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1555.555926] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c679f8bf-c3d2-493e-9cd3-d05536796a11 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1555.568969] env[62476]: DEBUG nova.compute.provider_tree [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1555.578822] env[62476]: DEBUG nova.scheduler.client.report [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1555.592469] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1555.592928] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1555.627095] env[62476]: DEBUG nova.compute.utils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1555.628780] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1555.628780] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1555.637719] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1555.703139] env[62476]: DEBUG nova.policy [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e09bc7d8b124150b546c3ed12a2306b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '371e27a2809343e086ecc898ba5b9ff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1555.715558] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1555.741590] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=<?>,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-07-18T15:29:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1555.741838] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1555.741997] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1555.742196] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1555.742357] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1555.742490] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1555.742696] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1555.742941] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1555.743130] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1555.743295] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1555.743468] env[62476]: DEBUG nova.virt.hardware [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1555.744789] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c306546-5114-4097-bbbc-9e93dc6d0d5d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1555.752847] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee407744-0d7c-4eae-86d5-971bdde4d123 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1556.058085] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Successfully created port: b55c7eb6-9d10-457d-89e2-b82e79660908 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1556.804104] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Successfully updated port: b55c7eb6-9d10-457d-89e2-b82e79660908 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1556.825245] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "refresh_cache-fe895d70-4c56-4854-83bf-a66cc1623d59" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1556.825317] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquired lock "refresh_cache-fe895d70-4c56-4854-83bf-a66cc1623d59" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1556.825934] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1556.883757] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1557.009097] env[62476]: DEBUG nova.compute.manager [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Received event network-vif-plugged-b55c7eb6-9d10-457d-89e2-b82e79660908 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1557.009343] env[62476]: DEBUG oslo_concurrency.lockutils [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] Acquiring lock "fe895d70-4c56-4854-83bf-a66cc1623d59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1557.009557] env[62476]: DEBUG oslo_concurrency.lockutils [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1557.009826] env[62476]: DEBUG oslo_concurrency.lockutils [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1557.010011] env[62476]: DEBUG nova.compute.manager [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] No waiting events found dispatching network-vif-plugged-b55c7eb6-9d10-457d-89e2-b82e79660908 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1557.010403] env[62476]: WARNING nova.compute.manager [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Received unexpected event network-vif-plugged-b55c7eb6-9d10-457d-89e2-b82e79660908 for instance with vm_state building and task_state spawning.
[ 1557.010579] env[62476]: DEBUG nova.compute.manager [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Received event network-changed-b55c7eb6-9d10-457d-89e2-b82e79660908 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1557.010812] env[62476]: DEBUG nova.compute.manager [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Refreshing instance network info cache due to event network-changed-b55c7eb6-9d10-457d-89e2-b82e79660908.
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1557.010986] env[62476]: DEBUG oslo_concurrency.lockutils [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] Acquiring lock "refresh_cache-fe895d70-4c56-4854-83bf-a66cc1623d59" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.119508] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Updating instance_info_cache with network_info: [{"id": "b55c7eb6-9d10-457d-89e2-b82e79660908", "address": "fa:16:3e:47:a0:30", "network": {"id": "88c2f6ec-11ad-4cfb-842e-2742b4a21117", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2098974515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "371e27a2809343e086ecc898ba5b9ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb55c7eb6-9d", "ovs_interfaceid": "b55c7eb6-9d10-457d-89e2-b82e79660908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.135450] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Releasing lock "refresh_cache-fe895d70-4c56-4854-83bf-a66cc1623d59" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.135768] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Instance network_info: |[{"id": "b55c7eb6-9d10-457d-89e2-b82e79660908", "address": "fa:16:3e:47:a0:30", "network": {"id": "88c2f6ec-11ad-4cfb-842e-2742b4a21117", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2098974515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "371e27a2809343e086ecc898ba5b9ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": 
"nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb55c7eb6-9d", "ovs_interfaceid": "b55c7eb6-9d10-457d-89e2-b82e79660908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1557.136096] env[62476]: DEBUG oslo_concurrency.lockutils [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] Acquired lock "refresh_cache-fe895d70-4c56-4854-83bf-a66cc1623d59" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.136516] env[62476]: DEBUG nova.network.neutron [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Refreshing network info cache for port b55c7eb6-9d10-457d-89e2-b82e79660908 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1557.137678] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:a0:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f847601f-7479-48eb-842f-41f94eea8537', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b55c7eb6-9d10-457d-89e2-b82e79660908', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1557.145466] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Creating folder: Project (371e27a2809343e086ecc898ba5b9ff6). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1557.149112] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad084a77-10ba-4755-be46-0a02599433e6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.162149] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Created folder: Project (371e27a2809343e086ecc898ba5b9ff6) in parent group-v849485. [ 1557.162378] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Creating folder: Instances. Parent ref: group-v849563. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1557.162616] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-127892b1-22ee-48d0-978f-69c03e71c056 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1557.176143] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Created folder: Instances in parent group-v849563.
[ 1557.176143] env[62476]: DEBUG oslo.service.loopingcall [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1557.176143] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1557.176143] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8418cfae-b5e1-48e8-a49c-0281cd522dfc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1557.199665] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1557.199665] env[62476]: value = "task-4319143"
[ 1557.199665] env[62476]: _type = "Task"
[ 1557.199665] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1557.208446] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319143, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1557.464748] env[62476]: DEBUG nova.network.neutron [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Updated VIF entry in instance network info cache for port b55c7eb6-9d10-457d-89e2-b82e79660908.
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1557.465183] env[62476]: DEBUG nova.network.neutron [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Updating instance_info_cache with network_info: [{"id": "b55c7eb6-9d10-457d-89e2-b82e79660908", "address": "fa:16:3e:47:a0:30", "network": {"id": "88c2f6ec-11ad-4cfb-842e-2742b4a21117", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2098974515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "371e27a2809343e086ecc898ba5b9ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f847601f-7479-48eb-842f-41f94eea8537", "external-id": "nsx-vlan-transportzone-35", "segmentation_id": 35, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb55c7eb6-9d", "ovs_interfaceid": "b55c7eb6-9d10-457d-89e2-b82e79660908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.477233] env[62476]: DEBUG oslo_concurrency.lockutils [req-b337d5ff-598d-4fda-a0f3-137c1a819b14 req-65ae839e-2a27-42e8-925a-d2448b4456d0 service nova] Releasing lock "refresh_cache-fe895d70-4c56-4854-83bf-a66cc1623d59" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.709327] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319143, 'name': CreateVM_Task, 'duration_secs': 0.290288} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1557.709492] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1557.710209] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1557.710379] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1557.710702] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1557.710981] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-563d919b-d21a-43c2-8f29-1a2770177313 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1557.715461] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Waiting for the task: (returnval){
[ 1557.715461] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]529a5973-4ca4-0b8b-f83e-97a2b11486e5"
[ 1557.715461] env[62476]: _type = "Task"
[ 1557.715461] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1557.724412] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]529a5973-4ca4-0b8b-f83e-97a2b11486e5, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.226522] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.226879] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1558.226991] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.281794] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "fe895d70-4c56-4854-83bf-a66cc1623d59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.028373] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.061527] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.061527] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.061744] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.061880] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1595.063271] env[62476]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc05cb7a-f0bf-4550-9ad0-e27f2a3cc401 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.072768] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426d1dca-2253-46dc-a205-916409b8d2cc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.087893] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b87004b-bf58-44d0-94d2-f0342feef643 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.095219] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca49cb13-5f15-41da-a806-beb12f4ac305 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.126465] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180708MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1595.126654] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.126701] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.241200] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.241374] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.241505] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.241629] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.241750] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.241869] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.241990] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.242122] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.242242] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.242381] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1595.254587] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1595.268961] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1595.280424] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1595.291982] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bd7629be-54eb-4a22-a601-8ffa8ec5d4f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1595.292353] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1595.292512] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1595.474589] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef4c44b-031d-4eff-a26f-7ef47c73586f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.482574] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83aa1d3-839b-42a4-ba61-795e1d53b839 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.512656] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3e41f7-e52f-462a-bfc1-fe66ea3cc6d0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.521332] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa030ce6-44ff-4d95-93d1-ea52bef16861 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.536953] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 
0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.544593] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1595.558554] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1595.558773] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.432s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.559661] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.559975] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1599.027577] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1599.027746] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1599.027872] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1599.048458] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.048744] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.048812] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.048937] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.049669] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.049669] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.049669] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.049669] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.049669] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.049948] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1599.049948] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
[ 1600.026691] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1600.425320] env[62476]: WARNING oslo_vmware.rw_handles [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles response.begin()
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1600.425320] env[62476]: ERROR oslo_vmware.rw_handles
[ 1600.425814] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1600.428535] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1600.428786] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Copying Virtual Disk [datastore1] vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/783d815b-507e-4fe6-bbde-a23366b30579/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1600.429198] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65a3a0cb-bf6e-468b-8d3b-67c1bb55223e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1600.438068] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Waiting for the task: (returnval){
[ 1600.438068] env[62476]: value = "task-4319144"
[ 1600.438068] env[62476]: _type = "Task"
[ 1600.438068] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1600.446545] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Task: {'id': task-4319144, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1600.948927] env[62476]: DEBUG oslo_vmware.exceptions [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1600.949359] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1600.949934] env[62476]: ERROR nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1600.949934] env[62476]: Faults: ['InvalidArgument']
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Traceback (most recent call last):
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] yield resources
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self.driver.spawn(context, instance, image_meta,
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self._fetch_image_if_missing(context, vi)
[ 1600.949934] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] image_cache(vi, tmp_image_ds_loc)
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] vm_util.copy_virtual_disk(
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] session._wait_for_task(vmdk_copy_task)
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] return self.wait_for_task(task_ref)
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] return evt.wait()
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] result = hub.switch()
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1600.950305] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] return self.greenlet.switch()
[ 1600.950621] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1600.950621] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self.f(*self.args, **self.kw)
[ 1600.950621] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1600.950621] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] raise exceptions.translate_fault(task_info.error)
[ 1600.950621] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1600.950621] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Faults: ['InvalidArgument']
[ 1600.950621] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954]
[ 1600.950621] env[62476]: INFO nova.compute.manager [None
req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Terminating instance [ 1600.951909] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.952131] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1600.952387] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e9bb64f-9084-44b6-a7b1-38bc034cce1f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.954645] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1600.954904] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1600.955603] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704f1779-58f9-401f-aaa9-87a8f8ed84c9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.962690] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1600.962920] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6769d2d-748c-4c66-8f42-5c65789f53ef {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.965144] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1600.965321] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1600.966324] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d31ed32-619b-4954-bb11-5bf9098de8ae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1600.971272] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){
[ 1600.971272] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52896a3a-f502-10f1-3447-2c716d187f08"
[ 1600.971272] env[62476]: _type = "Task"
[ 1600.971272] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1600.981772] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52896a3a-f502-10f1-3447-2c716d187f08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1601.026583] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1601.031549] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1601.031908] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1601.031953] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Deleting the datastore file [datastore1] bfd1d3fe-c8ba-4b77-b633-f77010674954 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1601.032239] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27ec65f1-5152-46a0-8251-15e7b1b5bcb3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1601.039135] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Waiting for the task: (returnval){
[ 1601.039135] env[62476]: value = "task-4319146"
[ 1601.039135] env[62476]: _type = "Task"
[ 1601.039135] env[62476]: } to complete.
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.047485] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Task: {'id': task-4319146, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.482609] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1601.482930] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating directory with path [datastore1] vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1601.483247] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-831c7048-955a-4d94-a6fd-674e3ff3c5a6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.495593] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Created directory with path [datastore1] vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1601.495856] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Fetch image to [datastore1] vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1601.496082] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1601.496865] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859ab4a9-54ea-4dc1-8cbc-0ae5e7043987 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.504313] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989c8dbb-0f31-45da-893d-0a4813ccb363 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.513770] env[62476]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fdeea8-6694-4028-9f54-add92ab7a95f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.548116] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621468e1-1608-4b49-8f81-d15a583c5571 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.555914] env[62476]: DEBUG oslo_vmware.api [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Task: {'id': task-4319146, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082943} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.557475] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1601.557673] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1601.557850] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1601.558061] env[62476]: INFO nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Took 0.60 seconds to destroy the instance on the hypervisor. 
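
The DeleteDatastoreFile_Task records above follow oslo.vmware's standard invoke-then-poll pattern: a *_Task SOAP call returns a task reference, and wait_for_task() polls it (the "progress is 0%" lines) until it completes or faults. A minimal sketch of that pattern, using placeholder vCenter credentials rather than this deployment's values; only the datastore path comes from the log:

    # Sketch only: host and credentials are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        10,    # api_retry_count: retries on transient connection faults
        0.5)   # task_poll_interval: seconds between _poll_task calls

    # *_Task methods return a task reference; wait_for_task() polls it
    # and raises a translated exception if the task ends in error.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore1] bfd1d3fe-c8ba-4b77-b633-f77010674954',
        datacenter=None)
    session.wait_for_task(task)
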
[ 1601.559877] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2d4fa4fa-87ea-443b-a037-345fc12c4eaa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.561918] env[62476]: DEBUG nova.compute.claims [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1601.562100] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.562321] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.589015] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1601.737741] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1601.799281] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1601.799483] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1601.846926] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea709ba3-d0e8-4e57-862f-5985612c2314 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.855253] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e55308d-23fb-4fe9-b229-c75823bb60a6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.885576] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93d35ed-780e-4243-94bd-b25206fd7db3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.894036] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df65a5cc-c524-40fd-9c02-4281dcd62072 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.908421] env[62476]: DEBUG nova.compute.provider_tree [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1601.917583] env[62476]: DEBUG nova.scheduler.client.report [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1601.935839] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.373s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.935986] env[62476]: ERROR nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1601.935986] env[62476]: Faults: ['InvalidArgument'] [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Traceback (most recent call last): [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1601.935986] env[62476]: ERROR 
nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self.driver.spawn(context, instance, image_meta, [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self._fetch_image_if_missing(context, vi) [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] image_cache(vi, tmp_image_ds_loc) [ 1601.935986] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] vm_util.copy_virtual_disk( [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] session._wait_for_task(vmdk_copy_task) [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] return self.wait_for_task(task_ref) [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] return evt.wait() [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] result = hub.switch() [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] return self.greenlet.switch() [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1601.936381] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] self.f(*self.args, **self.kw) [ 1601.936684] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1601.936684] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] raise exceptions.translate_fault(task_info.error) [ 1601.936684] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1601.936684] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Faults: ['InvalidArgument'] [ 1601.936684] env[62476]: ERROR nova.compute.manager [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] [ 1601.936810] env[62476]: DEBUG nova.compute.utils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1601.938359] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Build of instance bfd1d3fe-c8ba-4b77-b633-f77010674954 was re-scheduled: A specified parameter was not correct: fileType [ 1601.938359] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1601.938795] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1601.938966] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1601.939149] env[62476]: DEBUG nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1601.939314] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1602.029611] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1602.317776] env[62476]: DEBUG nova.network.neutron [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.337517] env[62476]: INFO nova.compute.manager [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Took 0.40 seconds to deallocate network for instance. [ 1602.450953] env[62476]: INFO nova.scheduler.client.report [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Deleted allocations for instance bfd1d3fe-c8ba-4b77-b633-f77010674954 [ 1602.472996] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f1de74f3-1a56-4894-a8d2-a07c82c1c6ab tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 573.481s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.474232] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 396.038s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.474449] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] During sync_power_state the instance has a pending task (spawning). Skip. 
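
The traceback above shows the CopyVirtualDisk_Task fault reaching the compute manager as a VimFaultException. oslo.vmware's translate_fault() maps known fault names to dedicated exception classes; unmatched names (as with InvalidArgument here) stay on the generic exception's fault_list. A hedged sketch of how a caller can distinguish that case; the function name is illustrative, not Nova's:

    from oslo_vmware import exceptions as vexc

    def wait_checking_invalid_argument(session, task):
        """Wait on a VMware task; report InvalidArgument faults explicitly.
        `session` is an oslo_vmware.api.VMwareAPISession (see earlier sketch).
        """
        try:
            session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # Fault names translate_fault() could not match are kept in
            # fault_list, e.g. ['InvalidArgument'] for the copy above.
            if 'InvalidArgument' in e.fault_list:
                print('task rejected a parameter: %s' % e)
            raise
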
[ 1602.474639] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.475290] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 378.131s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.475504] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Acquiring lock "bfd1d3fe-c8ba-4b77-b633-f77010674954-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.475703] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.475865] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.477640] env[62476]: INFO nova.compute.manager [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Terminating instance [ 1602.479384] env[62476]: DEBUG nova.compute.manager [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1602.479577] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1602.480311] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b269521e-ca51-40e1-a0a9-a36e5c7e81cc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.490464] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b207dc96-650e-452d-8c7a-08a555529f46 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.502199] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1602.525300] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bfd1d3fe-c8ba-4b77-b633-f77010674954 could not be found. [ 1602.525510] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1602.525688] env[62476]: INFO nova.compute.manager [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1602.525928] env[62476]: DEBUG oslo.service.loopingcall [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.526176] env[62476]: DEBUG nova.compute.manager [-] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1602.526269] env[62476]: DEBUG nova.network.neutron [-] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1602.551518] env[62476]: DEBUG nova.network.neutron [-] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.557047] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.557370] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.559215] env[62476]: INFO nova.compute.claims [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1602.562345] env[62476]: INFO nova.compute.manager [-] [instance: bfd1d3fe-c8ba-4b77-b633-f77010674954] Took 0.04 seconds to deallocate network for instance. 
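
The "compute_resources" Acquiring/acquired/released records here (and throughout this log) are emitted by oslo.concurrency itself around the resource tracker's claim methods. A minimal sketch of the two lock forms that produce those lines, with illustrative bodies; only the lock name and instance UUID come from the log:

    from oslo_concurrency import lockutils

    # Context-manager form: lockutils logs the Acquiring/acquired/released
    # DEBUG lines seen above around this block.
    with lockutils.lock('compute_resources'):
        pass  # e.g. claim resources for c7e551af-a94e-48da-a725-53ebd73d43ee

    # Decorator form: equivalent serialization for a whole function, as the
    # resource tracker does for instance_claim/abort_instance_claim.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass
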
[ 1602.684825] env[62476]: DEBUG oslo_concurrency.lockutils [None req-00ff73f1-6148-4339-912f-d8ab8d6a1f82 tempest-ServerMetadataTestJSON-986134943 tempest-ServerMetadataTestJSON-986134943-project-member] Lock "bfd1d3fe-c8ba-4b77-b633-f77010674954" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.209s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.795071] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293ebcd9-9664-42d5-80f3-c8948493b5ea {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.803345] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d99d5f-73e9-4748-a12c-b30aef9fee9d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.833231] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d7704e-a724-4b01-8070-95a925dbe3c0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.841414] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84016d2f-6948-4be7-bfa9-7edef0154bdc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.854948] env[62476]: DEBUG nova.compute.provider_tree [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1602.865322] env[62476]: DEBUG nova.scheduler.client.report [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1602.880013] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.322s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.880571] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1602.914638] env[62476]: DEBUG nova.compute.utils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1602.916080] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Not allocating networking since 'none' was specified. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1602.925662] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1602.996111] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1603.023194] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1603.023450] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1603.023601] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1603.023781] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1603.023926] env[62476]: DEBUG 
nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1603.024083] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1603.024295] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1603.024455] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1603.024621] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1603.024786] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1603.024961] env[62476]: DEBUG nova.virt.hardware [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1603.025886] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a4037b-0067-4a9e-86df-2ac7ae1d242c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.028772] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1603.034638] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fda274-d7b9-4688-ab31-45e364e2e6af {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.049962] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance VIF info [] {{(pid=62476) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1603.055503] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Creating folder: Project (24b6ea87034b437d94e8c1fc80b7576e). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1603.055797] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-794b7ee0-34a8-4ae8-be47-07210a93b9f8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.067195] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Created folder: Project (24b6ea87034b437d94e8c1fc80b7576e) in parent group-v849485. [ 1603.067442] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Creating folder: Instances. Parent ref: group-v849566. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1603.067653] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b207b900-aeeb-4e2f-bf75-f4029dab19c5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.077964] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Created folder: Instances in parent group-v849566. [ 1603.078255] env[62476]: DEBUG oslo.service.loopingcall [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.078471] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1603.078626] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-010c6754-a96f-453c-8cf8-362fff69c990 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.095941] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1603.095941] env[62476]: value = "task-4319149" [ 1603.095941] env[62476]: _type = "Task" [ 1603.095941] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.104033] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319149, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.605772] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319149, 'name': CreateVM_Task, 'duration_secs': 0.266838} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.606129] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1603.606397] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.606601] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.606977] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1603.607280] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf3be97f-cb1c-47e5-9ff1-7c8cefc960be {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.611823] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Waiting for the task: (returnval){ [ 1603.611823] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]529a33f1-b2b6-cfb7-26c9-b0acc7656a85" [ 1603.611823] env[62476]: _type = "Task" [ 1603.611823] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.619551] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]529a33f1-b2b6-cfb7-26c9-b0acc7656a85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.022829] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.123030] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.123248] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1604.123990] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.026649] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1622.904431] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.904817] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.325760] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "c7e551af-a94e-48da-a725-53ebd73d43ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.585566] env[62476]: WARNING oslo_vmware.rw_handles [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Error 
occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1649.585566] env[62476]: ERROR oslo_vmware.rw_handles [ 1649.586443] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1649.587974] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1649.588305] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Copying Virtual Disk [datastore1] vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/455bed98-298e-4b9c-a809-bed3298fec68/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1649.588605] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e1be463-9305-4e3c-966d-ac175dee341c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.598112] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){ [ 1649.598112] env[62476]: value = "task-4319150" [ 1649.598112] env[62476]: _type = "Task" [ 1649.598112] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.611937] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': task-4319150, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.109272] env[62476]: DEBUG oslo_vmware.exceptions [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1650.109626] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.110285] env[62476]: ERROR nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1650.110285] env[62476]: Faults: ['InvalidArgument'] [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Traceback (most recent call last): [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] yield resources [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self.driver.spawn(context, instance, image_meta, [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self._fetch_image_if_missing(context, vi) [ 1650.110285] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] image_cache(vi, tmp_image_ds_loc) [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 
5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] vm_util.copy_virtual_disk( [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] session._wait_for_task(vmdk_copy_task) [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] return self.wait_for_task(task_ref) [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] return evt.wait() [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] result = hub.switch() [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1650.110773] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] return self.greenlet.switch() [ 1650.111194] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1650.111194] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self.f(*self.args, **self.kw) [ 1650.111194] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1650.111194] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] raise exceptions.translate_fault(task_info.error) [ 1650.111194] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1650.111194] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Faults: ['InvalidArgument'] [ 1650.111194] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] [ 1650.111194] env[62476]: INFO nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Terminating instance [ 1650.112475] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.112721] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1650.113012] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb26c396-510b-4ed7-9583-babe44a73533 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.116787] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1650.117030] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1650.117907] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d618fda-683f-4d30-9954-b774e5e402db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.122622] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1650.122834] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1650.125673] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-809aaa35-c555-4e36-9a70-d581e3b41fcf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.128149] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1650.128402] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4537d4f-7e2f-4dca-9347-e4e09a964d23 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.133330] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1650.133330] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52d09165-896e-5d0c-f7af-43c47a4b7122" [ 1650.133330] env[62476]: _type = "Task" [ 1650.133330] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.142159] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52d09165-896e-5d0c-f7af-43c47a4b7122, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.195188] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1650.195470] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1650.195660] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Deleting the datastore file [datastore1] 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1650.195945] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8b4530e-5ba2-4a8d-a72a-f964397eede3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.202974] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for the task: (returnval){ [ 1650.202974] env[62476]: value = "task-4319152" [ 1650.202974] env[62476]: _type = "Task" [ 1650.202974] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.211159] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': task-4319152, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.644188] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1650.644569] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1650.644684] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ca3c566-451a-4092-a768-d8426ff3fee4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.657022] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1650.657239] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Fetch image to [datastore1] vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1650.657411] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1650.658165] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48e2497-c32d-4f09-9c3e-6b042456cfe3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.664809] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03a646d-5bc4-45f6-858c-b86e403a179d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.674103] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79dfd0e-7d5e-491d-96b7-4c32822d6510 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.707217] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73f5e8c-06db-4a39-a377-f51ae029da5a {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.716846] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2966d365-e950-4a6e-921c-e7fb5196b831 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.718589] env[62476]: DEBUG oslo_vmware.api [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Task: {'id': task-4319152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088557} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.718813] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1650.718986] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1650.719168] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1650.719338] env[62476]: INFO nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Took 0.60 seconds to destroy the instance on the hypervisor. 
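The fileType/InvalidArgument fault above is raised client-side by oslo.vmware's task poller: wait_for_task() runs _poll_task() in a looping call, and when vCenter reports the task as errored, translate_fault() maps the fault name to an exception class. The earlier "Fault InvalidArgument not matched" DEBUG line means no specific class was registered for that fault name, so the generic VimFaultException is raised. A minimal sketch of the same call pattern follows; the endpoint, credentials, and datastore paths are placeholders, not values from this log:

    from oslo_vmware import api, exceptions

    # Placeholder connection details; in Nova these come from the
    # [vmware] section of nova.conf.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   10,    # api_retry_count
                                   0.5)   # task_poll_interval, seconds

    vim = session.vim
    # CopyVirtualDisk_Task returns a task reference immediately;
    # wait_for_task() polls it and raises on a fault, which is the
    # path shown in the traceback above. Datacenter refs are omitted
    # here for brevity; Nova also passes sourceDatacenter.
    copy_task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName='[datastore1] devstack-image-cache_base/tmp-sparse.vmdk',
        destName='[datastore1] devstack-image-cache_base/image.vmdk')
    try:
        session.wait_for_task(copy_task)
    except exceptions.VimFaultException as e:
        # e.fault_list carries the raw vCenter fault names,
        # e.g. ['InvalidArgument'] as logged above.
        print(e.fault_list, str(e))

Note that the spawn is not failed outright: as the records below show, the compute manager tears the instance down and re-schedules the build.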
[ 1650.721434] env[62476]: DEBUG nova.compute.claims [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1650.721609] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.721822] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.745180] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1650.801702] env[62476]: DEBUG oslo_vmware.rw_handles [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1650.862398] env[62476]: DEBUG oslo_vmware.rw_handles [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1650.862676] env[62476]: DEBUG oslo_vmware.rw_handles [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1651.016674] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4316f86-bdab-42b4-a853-d66047a9ece4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.024681] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be78304-a7de-4106-9f07-3dac4ccbb0ab {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.054227] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48867ef-7b13-4fbc-882d-1c6fa4865e65 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.062490] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0903cb-b683-421f-9f4b-76db02012908 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.076856] env[62476]: DEBUG nova.compute.provider_tree [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.086159] env[62476]: DEBUG nova.scheduler.client.report [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.100087] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.378s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.100672] env[62476]: ERROR nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1651.100672] env[62476]: Faults: ['InvalidArgument'] [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Traceback (most recent call last): [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1651.100672] env[62476]: 
ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self.driver.spawn(context, instance, image_meta, [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self._fetch_image_if_missing(context, vi) [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] image_cache(vi, tmp_image_ds_loc) [ 1651.100672] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] vm_util.copy_virtual_disk( [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] session._wait_for_task(vmdk_copy_task) [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] return self.wait_for_task(task_ref) [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] return evt.wait() [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] result = hub.switch() [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] return self.greenlet.switch() [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1651.101152] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] self.f(*self.args, **self.kw) [ 1651.101563] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1651.101563] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] raise exceptions.translate_fault(task_info.error) [ 1651.101563] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1651.101563] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Faults: ['InvalidArgument'] [ 1651.101563] env[62476]: ERROR nova.compute.manager [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] [ 1651.101563] env[62476]: DEBUG nova.compute.utils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1651.103233] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Build of instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d was re-scheduled: A specified parameter was not correct: fileType [ 1651.103233] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1651.103607] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1651.103780] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1651.103951] env[62476]: DEBUG nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1651.104177] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1651.443734] env[62476]: DEBUG nova.network.neutron [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.463762] env[62476]: INFO nova.compute.manager [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Took 0.36 seconds to deallocate network for instance. [ 1651.564893] env[62476]: INFO nova.scheduler.client.report [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Deleted allocations for instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d [ 1651.589359] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3984a1c0-a6c7-45ae-b76f-58633a11a345 tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 602.255s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.590665] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 405.086s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.590976] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Acquiring lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.591219] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.591392] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.594030] env[62476]: INFO nova.compute.manager [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Terminating instance [ 1651.595445] env[62476]: DEBUG nova.compute.manager [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1651.595645] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1651.596633] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-659ea0e6-8672-41b1-bbee-ce1f5b30be85 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.601028] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1651.607945] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07647b5a-11fb-41ef-aeed-9ef33e6a72b6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.643703] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d could not be found. [ 1651.643960] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1651.644227] env[62476]: INFO nova.compute.manager [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Took 0.05 seconds to destroy the instance on the hypervisor. 
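Every "Acquiring lock ... / acquired ... / released" triple in these records is emitted by oslo.concurrency's lockutils, and the wait times are the interesting part: the build and the terminate of instance 5e7c207f serialize on a semaphore named after the instance UUID, so this terminate request blocked 405.086s until the re-scheduled build released the lock it had held for 602.255s. A minimal sketch of that pattern, with illustrative function names rather than Nova's:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d'

    # The decorator alone produces the Acquiring/acquired/released DEBUG
    # lines seen in this log, including the waited/held timings.
    @lockutils.synchronized(INSTANCE_UUID)
    def locked_do_build_and_run_instance():
        pass  # body elided

    # A concurrent terminate blocks here until the build lock drops;
    # this is the 405.086s wait recorded above.
    with lockutils.lock(INSTANCE_UUID):
        pass  # destroy path runs once the lock is held

The InstanceNotFound warning that follows is the benign outcome of that ordering: by the time terminate acquired the lock, the failed build had already unregistered the VM and deleted its datastore files, so destroy finds nothing on the backend and completes immediately.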
[ 1651.644544] env[62476]: DEBUG oslo.service.loopingcall [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1651.647099] env[62476]: DEBUG nova.compute.manager [-] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1651.647205] env[62476]: DEBUG nova.network.neutron [-] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1651.662763] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.663014] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.664460] env[62476]: INFO nova.compute.claims [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1651.675177] env[62476]: DEBUG nova.network.neutron [-] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.685364] env[62476]: INFO nova.compute.manager [-] [instance: 5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d] Took 0.04 seconds to deallocate network for instance. 
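The claim records here show both halves of the resource-tracker protocol: instance_claim for ea606214 takes the process-wide "compute_resources" lock and reserves CPU/RAM/disk before the spawn begins, while the earlier "Aborting claim" for 5e7c207f returned its reservation after the failed spawn. Nova implements this as a context manager in nova/compute/claims.py; the sketch below is a schematic stand-in for that shape, not the actual class:

    from oslo_concurrency import lockutils

    class Claim:
        """Schematic claim: reserve resources up front, give them back
        automatically if the guarded block raises."""

        def __init__(self, tracker, instance):
            self.tracker = tracker
            self.instance = instance

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc, tb):
            if exc_type is not None:
                # The "Aborting claim" path logged above.
                self.abort()

        @lockutils.synchronized('compute_resources')
        def abort(self):
            # Mirrors ResourceTracker.abort_instance_claim, which is why
            # the abort in this log re-acquires "compute_resources".
            self.tracker.abort_instance_claim(self.instance)

Used as "with tracker.instance_claim(...):", any exception escaping the build (such as the VimFaultException above) releases the reservation without extra bookkeeping in the caller, and the placement inventory comparison ("Inventory has not changed for provider ...") then reflects the corrected usage.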
[ 1651.775698] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6bb0b348-d2d0-4f06-be93-84922c5db5af tempest-SecurityGroupsTestJSON-1602384465 tempest-SecurityGroupsTestJSON-1602384465-project-member] Lock "5e7c207f-35e1-48b7-a6eb-582a0a3a8f9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.879260] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e125e458-fae2-4075-ba8d-76c214758b44 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.886899] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b64d48b-5686-4507-b29d-1858011a34aa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.917802] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c8d012-3be5-456e-9e1c-4cd174801385 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.927409] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f3726b-61a3-475b-ac46-5996a380928d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.941182] env[62476]: DEBUG nova.compute.provider_tree [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.951952] env[62476]: DEBUG nova.scheduler.client.report [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.967016] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.967542] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1652.000853] env[62476]: DEBUG nova.compute.utils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1652.002353] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1652.002526] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1652.013395] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1652.079607] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1652.100041] env[62476]: DEBUG nova.policy [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9e4673294b1477d93bdae5dfef42927', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16d034f4180f4aeaa8f880c3e6767730', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1652.110025] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1652.110370] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1652.110611] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1652.110800] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1652.110958] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1652.111179] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1652.111440] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1652.111638] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1652.111877] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1652.112091] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1652.112311] env[62476]: DEBUG nova.virt.hardware [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1652.113244] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d637063f-df48-4b23-9309-77576094aa34 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1652.121827] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649029f7-d0a2-482e-9865-bde60fa5e573 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.484902] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Successfully created port: 7bd0ce47-28fd-420e-8bb2-1aca348ef112 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1653.175819] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Successfully updated port: 7bd0ce47-28fd-420e-8bb2-1aca348ef112 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1653.192918] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "refresh_cache-ea606214-a34b-4972-8948-a6ff8c55b889" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.193096] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "refresh_cache-ea606214-a34b-4972-8948-a6ff8c55b889" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.193177] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1653.241976] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1653.489583] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Updating instance_info_cache with network_info: [{"id": "7bd0ce47-28fd-420e-8bb2-1aca348ef112", "address": "fa:16:3e:4d:fc:40", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bd0ce47-28", "ovs_interfaceid": "7bd0ce47-28fd-420e-8bb2-1aca348ef112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.504289] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "refresh_cache-ea606214-a34b-4972-8948-a6ff8c55b889" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.504602] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Instance network_info: |[{"id": "7bd0ce47-28fd-420e-8bb2-1aca348ef112", "address": "fa:16:3e:4d:fc:40", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bd0ce47-28", "ovs_interfaceid": "7bd0ce47-28fd-420e-8bb2-1aca348ef112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1653.505046] env[62476]: DEBUG 
nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:fc:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7bd0ce47-28fd-420e-8bb2-1aca348ef112', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1653.513660] env[62476]: DEBUG oslo.service.loopingcall [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1653.514211] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1653.514446] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45b530ba-f1a7-4df1-92d9-1f9a83453363 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.533176] env[62476]: DEBUG nova.compute.manager [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Received event network-vif-plugged-7bd0ce47-28fd-420e-8bb2-1aca348ef112 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1653.533385] env[62476]: DEBUG oslo_concurrency.lockutils [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] Acquiring lock "ea606214-a34b-4972-8948-a6ff8c55b889-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.533592] env[62476]: DEBUG oslo_concurrency.lockutils [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] Lock "ea606214-a34b-4972-8948-a6ff8c55b889-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.533775] env[62476]: DEBUG oslo_concurrency.lockutils [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] Lock "ea606214-a34b-4972-8948-a6ff8c55b889-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.533949] env[62476]: DEBUG nova.compute.manager [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] No waiting events found dispatching network-vif-plugged-7bd0ce47-28fd-420e-8bb2-1aca348ef112 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1653.534124] env[62476]: WARNING nova.compute.manager [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] 
[instance: ea606214-a34b-4972-8948-a6ff8c55b889] Received unexpected event network-vif-plugged-7bd0ce47-28fd-420e-8bb2-1aca348ef112 for instance with vm_state building and task_state spawning. [ 1653.534291] env[62476]: DEBUG nova.compute.manager [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Received event network-changed-7bd0ce47-28fd-420e-8bb2-1aca348ef112 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1653.534447] env[62476]: DEBUG nova.compute.manager [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Refreshing instance network info cache due to event network-changed-7bd0ce47-28fd-420e-8bb2-1aca348ef112. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1653.534631] env[62476]: DEBUG oslo_concurrency.lockutils [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] Acquiring lock "refresh_cache-ea606214-a34b-4972-8948-a6ff8c55b889" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.534770] env[62476]: DEBUG oslo_concurrency.lockutils [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] Acquired lock "refresh_cache-ea606214-a34b-4972-8948-a6ff8c55b889" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.534921] env[62476]: DEBUG nova.network.neutron [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Refreshing network info cache for port 7bd0ce47-28fd-420e-8bb2-1aca348ef112 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1653.541281] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1653.541281] env[62476]: value = "task-4319153" [ 1653.541281] env[62476]: _type = "Task" [ 1653.541281] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.552905] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319153, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.821218] env[62476]: DEBUG nova.network.neutron [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Updated VIF entry in instance network info cache for port 7bd0ce47-28fd-420e-8bb2-1aca348ef112. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1653.821617] env[62476]: DEBUG nova.network.neutron [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Updating instance_info_cache with network_info: [{"id": "7bd0ce47-28fd-420e-8bb2-1aca348ef112", "address": "fa:16:3e:4d:fc:40", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bd0ce47-28", "ovs_interfaceid": "7bd0ce47-28fd-420e-8bb2-1aca348ef112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.831465] env[62476]: DEBUG oslo_concurrency.lockutils [req-37b6f57a-536d-4bfa-b796-27ba452d18ae req-70ee6b45-e05d-4aaf-900b-cbdfd2cbf8ba service nova] Releasing lock "refresh_cache-ea606214-a34b-4972-8948-a6ff8c55b889" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.051182] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319153, 'name': CreateVM_Task, 'duration_secs': 0.28072} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.051361] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1654.051998] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.052181] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.052496] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1654.052749] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae24bef1-d39d-4e74-ba67-a72edf41492c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.057683] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1654.057683] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52af8cc4-5dc5-eaf7-e6b8-7286b2915ce6" [ 1654.057683] env[62476]: _type = "Task" [ 1654.057683] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.065734] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52af8cc4-5dc5-eaf7-e6b8-7286b2915ce6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.568862] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.569203] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1654.569358] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.026962] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1655.041017] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.041324] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.041510] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.041732] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1655.042828] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584a4b02-756d-4f4f-9e51-4532dd78cf35 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.051617] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52815b4-fb51-46c1-894a-1240f1a3d2d5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.067201] env[62476]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863bbf81-93da-4879-827d-8af3eea9a421 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.073819] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea4442e-f026-450d-aaa0-72ed41c81981 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.103633] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180682MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1655.103796] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.104014] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.186350] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.186646] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.186854] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.187043] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.187210] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.187379] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.187537] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.187697] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.187857] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.188116] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1655.227435] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1655.243956] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bd7629be-54eb-4a22-a601-8ffa8ec5d4f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1655.257133] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1655.257466] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1655.257630] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1655.450911] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184acdcb-458c-4f7b-b303-908bb0d29354 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.460430] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768fd3c5-5b7a-4a42-826d-5df350917c38 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.493320] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c73aad-5732-429d-a115-f8490dd7878b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.501556] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4235f3-dcb1-42ab-ba3d-14cbe3291607 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.516962] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1655.525909] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1655.545607] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1655.545808] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.442s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.546331] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1659.546626] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1660.027518] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1660.027705] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1660.027828] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1660.051572] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.051745] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.051862] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.051989] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.052127] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.052240] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.052357] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.052479] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.052601] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.052716] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1660.052833] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1661.026990] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1662.026913] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.027110] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1664.027951] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.130203] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "ea606214-a34b-4972-8948-a6ff8c55b889" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.022618] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1666.026346] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.024425] env[62476]: DEBUG
oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1698.079379] env[62476]: WARNING oslo_vmware.rw_handles [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1698.079379] env[62476]: ERROR oslo_vmware.rw_handles [ 1698.079980] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1698.081833] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1698.082112] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Copying Virtual Disk [datastore1] vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/871755fb-0cc7-48d8-b470-d59808e59ae6/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1698.082419] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f844289d-4bd6-4d2d-8a37-d9fb860e75f7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.090929] env[62476]: DEBUG oslo_vmware.api [None 
req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1698.090929] env[62476]: value = "task-4319154" [ 1698.090929] env[62476]: _type = "Task" [ 1698.090929] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.098802] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319154, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.601281] env[62476]: DEBUG oslo_vmware.exceptions [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1698.601580] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.602147] env[62476]: ERROR nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1698.602147] env[62476]: Faults: ['InvalidArgument'] [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Traceback (most recent call last): [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] yield resources [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] self.driver.spawn(context, instance, image_meta, [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] self._fetch_image_if_missing(context, vi) [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1698.602147] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] image_cache(vi, tmp_image_ds_loc) [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] vm_util.copy_virtual_disk( [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] session._wait_for_task(vmdk_copy_task) [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] return self.wait_for_task(task_ref) [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] return evt.wait() [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] result = hub.switch() [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] return self.greenlet.switch() [ 1698.602506] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1698.602960] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] self.f(*self.args, **self.kw) [ 1698.602960] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1698.602960] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] raise exceptions.translate_fault(task_info.error) [ 1698.602960] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1698.602960] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Faults: ['InvalidArgument'] [ 1698.602960] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] [ 1698.602960] env[62476]: INFO nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Terminating instance [ 1698.604261] env[62476]: DEBUG 
oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.604598] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1698.604957] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2601b50a-af84-483e-a55b-42362c02471f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.607199] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1698.607401] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1698.608131] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83bd904-fa41-47fd-b015-419a9b4458bd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.615605] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1698.615842] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79ca1fd0-b7f2-4296-b73a-ba037c8c0757 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.618092] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1698.618266] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1698.619250] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-507dddbf-c5ae-40a3-86b8-61d228f8790f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.624309] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 1698.624309] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52defb1a-9e18-a0ef-6f62-2039aeb3d5c7" [ 1698.624309] env[62476]: _type = "Task" [ 1698.624309] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.634125] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52defb1a-9e18-a0ef-6f62-2039aeb3d5c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.688514] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1698.688790] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1698.688991] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleting the datastore file [datastore1] 4954bf5d-20db-4787-91b5-a990ed30cdf3 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1698.689339] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1820646-dd07-4478-94fd-d2729b276754 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.696417] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1698.696417] env[62476]: value = "task-4319156" [ 1698.696417] env[62476]: _type = "Task" [ 1698.696417] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.704638] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319156, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.134572] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1699.134881] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating directory with path [datastore1] vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1699.135069] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-671c2bd6-13f6-42b0-a901-da59080375c0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.146910] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created directory with path [datastore1] vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1699.147059] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Fetch image to [datastore1] vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1699.147241] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1699.148095] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5aef89-dde8-419f-a151-0809ab458798 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.155076] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784ee5bc-5bbc-4a7c-a370-4d9372947136 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.164544] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87009a89-bf0b-49ec-bcfb-99ae187d8995 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.194833] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51bd38c-aeca-498c-943e-7558347248f7 {{(pid=62476) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.205896] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-206971b1-177e-4600-a20e-771c36f6ee9e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.207571] env[62476]: DEBUG oslo_vmware.api [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319156, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072838} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.207815] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1699.207995] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1699.208182] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1699.208360] env[62476]: INFO nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Took 0.60 seconds to destroy the instance on the hypervisor. 
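The task lifecycle that recurs throughout this log (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) follows one poll pattern: wait_for_task reports "progress is 0%.", then the task either completes successfully with a duration_secs or raises a translated fault, as the CopyVirtualDisk_Task failure with InvalidArgument above shows. A minimal, self-contained Python sketch of that loop follows; FakeTask and TaskFault are hypothetical stand-ins for the task_info object and exceptions.translate_fault() in oslo.vmware, not the library's actual code:

import time

class TaskFault(Exception):
    """Stands in for the exception raised via exceptions.translate_fault(task_info.error)."""

class FakeTask:
    """Hypothetical stand-in for the task_info that oslo.vmware reads from vCenter."""
    def __init__(self, steps):
        self._steps = iter(steps)  # sequence of (state, progress, error) tuples
        self.refresh()

    def refresh(self):
        self.state, self.progress, self.error = next(self._steps)

def wait_for_task(task, interval=0.01):
    """Poll until a terminal state, mirroring the 'progress is N%.' and
    'completed successfully' lines in this log."""
    start = time.monotonic()
    while task.state == "running":
        print(f"progress is {task.progress}%.")
        time.sleep(interval)
        task.refresh()
    if task.state == "error":
        raise TaskFault(task.error)  # e.g. the InvalidArgument fault seen above
    print(f"completed successfully. duration_secs={time.monotonic() - start:.6f}")

# A task that reports 0% once and then succeeds, like task-4319156 above:
wait_for_task(FakeTask([("running", 0, None), ("success", 100, None)]))

An error sequence such as FakeTask([("running", 0, None), ("error", 0, "InvalidArgument")]) exercises the failure path instead: the loop exits on the error state and the fault surfaces as an exception, which is how the CopyVirtualDisk_Task fault above propagated into nova.compute.manager.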
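Separately, the resource tracker's final view at 1655.257630 (used_ram=1792MB, used_disk=10GB, used_vcpus=10) is reproducible from the audit entries above it: ten actively managed instances each hold {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, the three instances that were scheduled but have yet to start are skipped, and the 512 MB 'reserved' in the MEMORY_MB inventory accounts for the remainder of used_ram. A short sketch of that arithmetic, assuming used_ram folds in the reserved memory (values copied from the log; this is an illustrative check, not nova.compute.resource_tracker code):

# Allocations copied from the 1655.x audit: 10 actively managed instances.
allocations = [{"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}] * 10
reserved_mb = 512  # 'reserved': 512 in the MEMORY_MB inventory above

used_ram = reserved_mb + sum(a["MEMORY_MB"] for a in allocations)  # assumed to include reserved memory
used_disk = sum(a["DISK_GB"] for a in allocations)
used_vcpus = sum(a["VCPU"] for a in allocations)

assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)  # matches the 'Final resource view' line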
[ 1699.210475] env[62476]: DEBUG nova.compute.claims [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1699.210642] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.210854] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.236483] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1699.296388] env[62476]: DEBUG oslo_vmware.rw_handles [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1699.359300] env[62476]: DEBUG oslo_vmware.rw_handles [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1699.359508] env[62476]: DEBUG oslo_vmware.rw_handles [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1699.483381] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb71a38-f4a5-4082-8f09-f41878fe2f22 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.491420] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad77905-dd4c-4bd0-9b40-f26d79a1385c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.522473] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be80c36-3006-423a-a93b-199bd0766f31 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.530452] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd90c83-8dbb-4ee8-a859-4b5a4b99f277 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.544130] env[62476]: DEBUG nova.compute.provider_tree [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.553757] env[62476]: DEBUG nova.scheduler.client.report [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1699.569134] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.358s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.569683] env[62476]: ERROR nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1699.569683] env[62476]: Faults: ['InvalidArgument'] [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Traceback (most recent call last): [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 
4954bf5d-20db-4787-91b5-a990ed30cdf3] self.driver.spawn(context, instance, image_meta, [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] self._fetch_image_if_missing(context, vi) [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] image_cache(vi, tmp_image_ds_loc) [ 1699.569683] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] vm_util.copy_virtual_disk( [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] session._wait_for_task(vmdk_copy_task) [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] return self.wait_for_task(task_ref) [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] return evt.wait() [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] result = hub.switch() [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] return self.greenlet.switch() [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1699.570091] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] self.f(*self.args, **self.kw) [ 1699.570719] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1699.570719] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] raise exceptions.translate_fault(task_info.error) [ 1699.570719] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1699.570719] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Faults: ['InvalidArgument'] [ 1699.570719] env[62476]: ERROR nova.compute.manager [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] [ 1699.570719] env[62476]: DEBUG nova.compute.utils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1699.571991] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Build of instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 was re-scheduled: A specified parameter was not correct: fileType [ 1699.571991] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1699.572398] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1699.572571] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1699.572747] env[62476]: DEBUG nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1699.572911] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1699.896976] env[62476]: DEBUG nova.network.neutron [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.908296] env[62476]: INFO nova.compute.manager [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Took 0.34 seconds to deallocate network for instance. [ 1700.016831] env[62476]: INFO nova.scheduler.client.report [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleted allocations for instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 [ 1700.039306] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b7739807-f576-42f6-96bf-5736133dc39d tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 625.475s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.040711] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 428.802s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.040804] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "4954bf5d-20db-4787-91b5-a990ed30cdf3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.040995] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.041800] env[62476]: 
DEBUG oslo_concurrency.lockutils [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.043613] env[62476]: INFO nova.compute.manager [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Terminating instance [ 1700.045494] env[62476]: DEBUG nova.compute.manager [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1700.045698] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1700.046430] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45750962-5ddd-45d5-b218-6928ddfff4f8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.053433] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1700.062223] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1328d6bf-eab5-40eb-a927-d1a394d5e53f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.095097] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4954bf5d-20db-4787-91b5-a990ed30cdf3 could not be found. [ 1700.095344] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1700.095526] env[62476]: INFO nova.compute.manager [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Took 0.05 seconds to destroy the instance on the hypervisor. 
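The destroy sequence above is deliberately idempotent: the earlier spawn failed before a VM ever existed in vCenter, so vmops catches InstanceNotFound from the backend lookup, logs the warning, and still reports the instance destroyed. A minimal sketch of that tolerate-missing-backend pattern follows; the helper names find_vm_by_uuid and unregister_and_delete are illustrative assumptions, not Nova's real API.

```python
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """The VM no longer exists (or never existed) on the hypervisor."""


def destroy_instance(session, instance_uuid):
    # Idempotent teardown: an instance already gone from the backend
    # still counts as destroyed, so cleanup after a failed/rescheduled
    # build cannot wedge the terminate path.
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)  # assumed helper
        session.unregister_and_delete(vm_ref)            # assumed helper
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")
```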
[ 1700.095788] env[62476]: DEBUG oslo.service.loopingcall [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1700.096049] env[62476]: DEBUG nova.compute.manager [-] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1700.096166] env[62476]: DEBUG nova.network.neutron [-] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1700.125021] env[62476]: DEBUG nova.network.neutron [-] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.125021] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.125021] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.126731] env[62476]: INFO nova.compute.claims [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1700.134539] env[62476]: INFO nova.compute.manager [-] [instance: 4954bf5d-20db-4787-91b5-a990ed30cdf3] Took 0.04 seconds to deallocate network for instance. 
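The oslo.service loopingcall record above wraps the network deallocation in a retry loop, so a transient Neutron failure during teardown does not leak ports. The following is only a simplified stand-in for what _deallocate_network_with_retries implies; the attempt count, interval, and back-off factor are invented for illustration, not Nova's configured values.

```python
import time


def deallocate_network_with_retries(deallocate, attempts=3, interval=1.0):
    """Retry a Neutron deallocation call with exponential back-off.

    `deallocate` stands in for something like
    neutron.deallocate_for_instance(); the loop is a sketch of the
    behaviour implied by the log, not Nova's actual implementation.
    """
    delay = interval
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception:
            if attempt == attempts:
                raise  # out of retries; the caller logs and moves on
            time.sleep(delay)
            delay *= 2  # back off before hitting Neutron again
```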
[ 1700.244771] env[62476]: DEBUG oslo_concurrency.lockutils [None req-1fa9eade-2c85-4b65-8dd2-7894e8f04aac tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "4954bf5d-20db-4787-91b5-a990ed30cdf3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.204s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.346121] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5880a462-42db-47b0-a1a7-94be06e269b7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.354444] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53d0314-009f-4325-a733-af5697bed445 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.384080] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0100b899-e399-44ed-93a0-88a7a82003dd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.391568] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94df57d3-79d8-47d2-a9ff-b8e64a79126a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.404891] env[62476]: DEBUG nova.compute.provider_tree [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.415272] env[62476]: DEBUG nova.scheduler.client.report [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.429835] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.430336] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1700.467536] env[62476]: DEBUG nova.compute.utils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1700.469136] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1700.469756] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1700.488143] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1700.561800] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1700.566649] env[62476]: DEBUG nova.policy [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a117f106402424280e477babc21990c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f16c7f1cb3ec41ffbdd622e3ee5992ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1700.592701] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1700.592927] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1700.593104] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1700.593289] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1700.593432] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1700.593570] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1700.593771] env[62476]: DEBUG nova.virt.hardware [None 
req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1700.593920] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1700.594093] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1700.594256] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1700.594421] env[62476]: DEBUG nova.virt.hardware [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1700.595311] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79fcd94-673f-4d04-aeef-4d83ffb80f12 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.604623] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62151079-e978-42cf-9c2c-ee5c991cb91d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.954710] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Successfully created port: c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1701.678199] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Successfully updated port: c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1701.691843] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "refresh_cache-1d67c106-ced2-4b4e-8abd-1652bd0509d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.691988] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 
tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "refresh_cache-1d67c106-ced2-4b4e-8abd-1652bd0509d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.692152] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1701.736437] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1701.939190] env[62476]: DEBUG nova.compute.manager [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Received event network-vif-plugged-c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1701.939425] env[62476]: DEBUG oslo_concurrency.lockutils [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] Acquiring lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.939632] env[62476]: DEBUG oslo_concurrency.lockutils [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.939799] env[62476]: DEBUG oslo_concurrency.lockutils [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.939978] env[62476]: DEBUG nova.compute.manager [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] No waiting events found dispatching network-vif-plugged-c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1701.940298] env[62476]: WARNING nova.compute.manager [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Received unexpected event network-vif-plugged-c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5 for instance with vm_state building and task_state spawning. 
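The "No waiting events found" / "Received unexpected event" pair above reflects how compute dispatches external events from Neutron: a thread that needs network-vif-plugged must register a waiter before the event can arrive, and an event with no registered waiter (here, while the build is still mid-spawn) is only logged. A rough sketch of that dispatch, using threading.Event in place of Nova's eventlet-based machinery; the class and method names below are illustrative, not the real InstanceEvents API.

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Toy registry of per-instance event waiters, keyed by event name."""

    def __init__(self):
        self._waiters = defaultdict(dict)  # instance uuid -> {event: Event}
        self._lock = threading.Lock()

    def prepare(self, uuid, event_name):
        # Called *before* starting the operation that triggers the event.
        waiter = threading.Event()
        with self._lock:
            self._waiters[uuid][event_name] = waiter
        return waiter

    def pop(self, uuid, event_name):
        with self._lock:
            return self._waiters[uuid].pop(event_name, None)


def dispatch_external_event(events, uuid, event_name):
    waiter = events.pop(uuid, event_name)
    if waiter is None:
        # Nobody registered interest yet: log-and-drop, as in the
        # "Received unexpected event" warning above.
        print(f"unexpected event {event_name} for instance {uuid}")
    else:
        waiter.set()  # wakes the thread blocked on waiter.wait()
```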
[ 1701.940494] env[62476]: DEBUG nova.compute.manager [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Received event network-changed-c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1701.940653] env[62476]: DEBUG nova.compute.manager [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Refreshing instance network info cache due to event network-changed-c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1701.940819] env[62476]: DEBUG oslo_concurrency.lockutils [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] Acquiring lock "refresh_cache-1d67c106-ced2-4b4e-8abd-1652bd0509d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.954882] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Updating instance_info_cache with network_info: [{"id": "c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5", "address": "fa:16:3e:66:6d:35", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4846a35-4a", "ovs_interfaceid": "c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.965191] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "refresh_cache-1d67c106-ced2-4b4e-8abd-1652bd0509d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.965465] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Instance network_info: |[{"id": "c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5", "address": "fa:16:3e:66:6d:35", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4846a35-4a", "ovs_interfaceid": "c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1701.965753] env[62476]: DEBUG oslo_concurrency.lockutils [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] Acquired lock "refresh_cache-1d67c106-ced2-4b4e-8abd-1652bd0509d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.965933] env[62476]: DEBUG nova.network.neutron [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Refreshing network info cache for port c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1701.966994] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:6d:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3734b156-0f7d-4721-b23c-d000412ec2eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1701.974642] env[62476]: DEBUG oslo.service.loopingcall [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.975614] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1701.977819] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-313c8e3c-7aa2-48d6-8fdd-311203b6c6ac {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.998850] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1701.998850] env[62476]: value = "task-4319157" [ 1701.998850] env[62476]: _type = "Task" [ 1701.998850] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.008381] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319157, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.320169] env[62476]: DEBUG nova.network.neutron [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Updated VIF entry in instance network info cache for port c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1702.320548] env[62476]: DEBUG nova.network.neutron [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Updating instance_info_cache with network_info: [{"id": "c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5", "address": "fa:16:3e:66:6d:35", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4846a35-4a", "ovs_interfaceid": "c4846a35-4ac0-4528-a7a2-cfe7f7fc4ff5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.333826] env[62476]: DEBUG oslo_concurrency.lockutils [req-209413d6-77d2-4719-9201-b11bce401243 req-c9d41cbc-ff1d-4431-a81d-43f8d6ab7733 service nova] Releasing lock "refresh_cache-1d67c106-ced2-4b4e-8abd-1652bd0509d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.509075] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319157, 'name': CreateVM_Task, 'duration_secs': 0.310181} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.509250] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1702.509903] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.510087] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.510479] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1702.510735] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c12bf16a-c4ce-45dc-bf88-1816b4901787 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.515848] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 1702.515848] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52cec48f-d705-f4c1-476a-f7342442aa48" [ 1702.515848] env[62476]: _type = "Task" [ 1702.515848] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.525694] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52cec48f-d705-f4c1-476a-f7342442aa48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.026694] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.027064] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1703.027180] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1710.208999] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.028054] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1716.039690] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.039915] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.040110] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.040351] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1716.041452] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-39c2472e-0c09-4d0e-9323-30bc9838e126 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.050486] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd104b4e-6416-4d15-8c35-b7ed124fa73a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.065527] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01540b75-2970-4d2a-a5c0-c358e7d2e31c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.072936] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca36186-de0d-4a51-a090-33313700d062 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.104870] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180721MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1716.105120] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.105265] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.181500] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.181654] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.181771] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.181895] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.182021] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.182151] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.182291] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.182433] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.182588] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.182654] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1716.199988] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance bd7629be-54eb-4a22-a601-8ffa8ec5d4f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1716.211811] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1716.212044] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1716.212211] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1716.375612] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab182ea7-0a6c-4874-955e-bec2963ff571 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.384080] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851e6d23-72e4-42ab-a56a-42e7fab08be1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.414796] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f5c6f0-e0fb-4798-ac5c-6bfe1ca7aa78 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.423108] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e735211-b30f-4d9f-9b6f-9f8a4e8df31b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.436949] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.445778] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1716.460981] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1716.461208] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.356s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.461381] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.461704] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1721.461704] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1721.482464] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.482464] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.482464] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.482670] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.482670] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.482737] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.482862] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.482984] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.483159] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.483283] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1721.483406] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1721.483905] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.484275] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1722.027309] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.027619] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.027552] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.022885] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.026474] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.026652] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.563817] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.564125] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.576528] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "9497c622-7f14-4fc2-ac24-d611897a8be9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.577273] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "9497c622-7f14-4fc2-ac24-d611897a8be9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.076616] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "3462762c-09da-473b-b2ba-4dce6c99dd8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.076831] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "3462762c-09da-473b-b2ba-4dce6c99dd8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.472212] env[62476]: WARNING oslo_vmware.rw_handles [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1745.472212] env[62476]: 
ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1745.472212] env[62476]: ERROR oslo_vmware.rw_handles [ 1745.472852] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1745.474961] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1745.475227] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Copying Virtual Disk [datastore1] vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/d45c56d0-de6d-4072-b09d-64988857d855/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1745.475536] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-304e636f-1e04-4954-a76c-1c7cf0e853f2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.484840] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 1745.484840] env[62476]: value = "task-4319158" [ 1745.484840] env[62476]: _type = "Task" [ 1745.484840] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.493204] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319158, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.996071] env[62476]: DEBUG oslo_vmware.exceptions [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1745.996071] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.996258] env[62476]: ERROR nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1745.996258] env[62476]: Faults: ['InvalidArgument'] [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Traceback (most recent call last): [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] yield resources [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self.driver.spawn(context, instance, image_meta, [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self._fetch_image_if_missing(context, vi) [ 1745.996258] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] image_cache(vi, tmp_image_ds_loc) [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] vm_util.copy_virtual_disk( [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] session._wait_for_task(vmdk_copy_task) [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] return self.wait_for_task(task_ref) [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] return evt.wait() [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] result = hub.switch() [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1745.996772] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] return self.greenlet.switch() [ 1745.997449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1745.997449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self.f(*self.args, **self.kw) [ 1745.997449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1745.997449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] raise exceptions.translate_fault(task_info.error) [ 1745.997449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1745.997449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Faults: ['InvalidArgument'] [ 1745.997449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] [ 1745.997449] env[62476]: INFO nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Terminating instance [ 1745.998178] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.998388] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 
tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1745.998631] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0112a0b-2f58-45bd-9540-14be9d8dc443 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.001042] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1746.001245] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1746.002023] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a535ce4-a3ad-45d2-ae2c-3b397ed016dd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.009723] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1746.010939] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e734f507-86c2-4ce4-9b1c-17d1be621b5f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.013050] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1746.013050] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1746.014061] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9fb8f1c-b7cb-469d-bf89-948bc903aae5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.019494] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1746.019494] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5286717c-8a2d-0d1d-016f-8fc73212ab55" [ 1746.019494] env[62476]: _type = "Task" [ 1746.019494] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.027976] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5286717c-8a2d-0d1d-016f-8fc73212ab55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.086219] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1746.086462] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1746.086648] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleting the datastore file [datastore1] 7211a8c4-5430-4b0c-86e7-8101ed71463e {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1746.086934] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20808ac7-84f3-43f3-b36b-ffd4e40b4e2d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.094606] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 1746.094606] env[62476]: value = "task-4319160" [ 1746.094606] env[62476]: _type = "Task" [ 1746.094606] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.103057] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319160, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.530076] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1746.530434] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating directory with path [datastore1] vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1746.530534] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce3409a2-419a-4fd8-bff0-011e46aa9923 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.543581] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Created directory with path [datastore1] vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1746.543811] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Fetch image to [datastore1] vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1746.543941] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1746.544717] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0719ebe6-fe80-43f4-8a42-7338bbd8c4d7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.551623] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7ec9a3-b111-42ff-b4bf-60cac66942f0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.560956] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec26eb5-2c6b-4dec-ac45-be93e97f907e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.592035] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a5ff9894-4f54-49a8-ac8b-d3ecc2b14e4c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.600700] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2dada3b5-0abb-42d5-af94-c0255899cbdb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.605055] env[62476]: DEBUG oslo_vmware.api [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073585} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.605612] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1746.605829] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1746.606041] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1746.606222] env[62476]: INFO nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1746.608323] env[62476]: DEBUG nova.compute.claims [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1746.608500] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.608738] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.625428] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1746.741744] env[62476]: DEBUG oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1746.804585] env[62476]: DEBUG oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1746.804787] env[62476]: DEBUG oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1746.896261] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db14fb0-ad67-4c41-8390-bec8ebe20114 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.904657] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e15ce4-dc1b-4fb3-9570-01ded2352520 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.936044] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c65afb-23b8-4323-a6d6-ab09cdb09317 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.943635] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcd7e59-5358-4a8e-92b2-41845211d84f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.957045] env[62476]: DEBUG nova.compute.provider_tree [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1746.966262] env[62476]: DEBUG nova.scheduler.client.report [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1746.981453] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.373s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.982091] env[62476]: ERROR nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1746.982091] env[62476]: Faults: ['InvalidArgument'] [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Traceback (most recent call last): [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1746.982091] env[62476]: ERROR 
nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self.driver.spawn(context, instance, image_meta, [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self._fetch_image_if_missing(context, vi) [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] image_cache(vi, tmp_image_ds_loc) [ 1746.982091] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] vm_util.copy_virtual_disk( [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] session._wait_for_task(vmdk_copy_task) [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] return self.wait_for_task(task_ref) [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] return evt.wait() [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] result = hub.switch() [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] return self.greenlet.switch() [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1746.982449] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] self.f(*self.args, **self.kw) [ 1746.982787] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1746.982787] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] raise exceptions.translate_fault(task_info.error) [ 1746.982787] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1746.982787] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Faults: ['InvalidArgument'] [ 1746.982787] env[62476]: ERROR nova.compute.manager [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] [ 1746.982919] env[62476]: DEBUG nova.compute.utils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1746.984531] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Build of instance 7211a8c4-5430-4b0c-86e7-8101ed71463e was re-scheduled: A specified parameter was not correct: fileType [ 1746.984531] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1746.984916] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1746.985108] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1746.985285] env[62476]: DEBUG nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1746.985487] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1747.430140] env[62476]: DEBUG nova.network.neutron [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.441561] env[62476]: INFO nova.compute.manager [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Took 0.46 seconds to deallocate network for instance. [ 1747.568371] env[62476]: INFO nova.scheduler.client.report [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleted allocations for instance 7211a8c4-5430-4b0c-86e7-8101ed71463e [ 1747.594911] env[62476]: DEBUG oslo_concurrency.lockutils [None req-42c66bb2-55f8-4aa1-8353-a5a45314aa78 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "7211a8c4-5430-4b0c-86e7-8101ed71463e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 626.854s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.596109] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "7211a8c4-5430-4b0c-86e7-8101ed71463e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 430.527s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.596334] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "7211a8c4-5430-4b0c-86e7-8101ed71463e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.596538] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "7211a8c4-5430-4b0c-86e7-8101ed71463e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.597290] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "7211a8c4-5430-4b0c-86e7-8101ed71463e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.598883] env[62476]: INFO nova.compute.manager [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Terminating instance [ 1747.600734] env[62476]: DEBUG nova.compute.manager [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1747.600940] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1747.601432] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88185f52-9f83-4519-bd2d-ae4b65546944 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.612934] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617916d3-cde8-4750-bf36-c7e53d98771b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.623878] env[62476]: DEBUG nova.compute.manager [None req-6f3e1d22-462b-4879-b808-2e751082a43e tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] [instance: bd7629be-54eb-4a22-a601-8ffa8ec5d4f0] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1747.645445] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7211a8c4-5430-4b0c-86e7-8101ed71463e could not be found. [ 1747.645664] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1747.645845] env[62476]: INFO nova.compute.manager [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1747.646099] env[62476]: DEBUG oslo.service.loopingcall [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1747.646326] env[62476]: DEBUG nova.compute.manager [-] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1747.646480] env[62476]: DEBUG nova.network.neutron [-] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1747.653883] env[62476]: DEBUG nova.compute.manager [None req-6f3e1d22-462b-4879-b808-2e751082a43e tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] [instance: bd7629be-54eb-4a22-a601-8ffa8ec5d4f0] Instance disappeared before build. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1747.685344] env[62476]: DEBUG oslo_concurrency.lockutils [None req-6f3e1d22-462b-4879-b808-2e751082a43e tempest-ServersTestMultiNic-944024043 tempest-ServersTestMultiNic-944024043-project-member] Lock "bd7629be-54eb-4a22-a601-8ffa8ec5d4f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 211.684s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.689987] env[62476]: DEBUG nova.network.neutron [-] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.698350] env[62476]: INFO nova.compute.manager [-] [instance: 7211a8c4-5430-4b0c-86e7-8101ed71463e] Took 0.05 seconds to deallocate network for instance. [ 1747.705632] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1747.763975] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.764414] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.766876] env[62476]: INFO nova.compute.claims [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1747.795709] env[62476]: DEBUG oslo_concurrency.lockutils [None req-2e07411d-6c1f-4864-8b40-fda6a1ac3ecd tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "7211a8c4-5430-4b0c-86e7-8101ed71463e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.200s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.992425] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f064b8f-fd1a-4f4a-b581-9368c0395f1d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.001259] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a96855-0d3e-42db-8947-e4a1e244a84c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.030711] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32ad3ce-ee86-40e2-a5b6-9c8ce199a955 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.038133] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cb790a-cf9b-4046-9e99-8e09e4ac8915 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.052719] env[62476]: DEBUG nova.compute.provider_tree [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.062864] env[62476]: DEBUG nova.scheduler.client.report [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1748.079202] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.315s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.079777] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1748.114905] env[62476]: DEBUG nova.compute.utils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1748.116608] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1748.116608] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1748.126118] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1748.176832] env[62476]: DEBUG nova.policy [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10dc9791cc96471c926e4eb8e1129b2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cdbe9b66c724475a673e94fdb118821', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1748.193660] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1748.224565] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=<?>,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-07-18T15:29:38Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1748.224817] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1748.224974] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.225172] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1748.225319] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.225463] 
env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1748.225672] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1748.225834] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1748.226008] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1748.226179] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1748.226354] env[62476]: DEBUG nova.virt.hardware [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1748.227255] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af8a32b-0f71-40fd-9856-26fe4a587b78 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.237775] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5c203f-fdc3-4ff9-ab71-56674fb5e6a9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.648190] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Successfully created port: e56ad835-6a7c-4744-884c-55e923967b3c {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1749.377181] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Successfully updated port: e56ad835-6a7c-4744-884c-55e923967b3c {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1749.387324] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "refresh_cache-e6b815fb-fa2d-4797-8810-c2b891f375cf" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.387590] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "refresh_cache-e6b815fb-fa2d-4797-8810-c2b891f375cf" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.387821] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1749.463181] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1749.502569] env[62476]: DEBUG nova.compute.manager [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Received event network-vif-plugged-e56ad835-6a7c-4744-884c-55e923967b3c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1749.503887] env[62476]: DEBUG oslo_concurrency.lockutils [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] Acquiring lock "e6b815fb-fa2d-4797-8810-c2b891f375cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.503887] env[62476]: DEBUG oslo_concurrency.lockutils [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.503887] env[62476]: DEBUG oslo_concurrency.lockutils [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.503887] env[62476]: DEBUG nova.compute.manager [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] No waiting events found dispatching network-vif-plugged-e56ad835-6a7c-4744-884c-55e923967b3c {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1749.504225] env[62476]: WARNING nova.compute.manager [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service 
nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Received unexpected event network-vif-plugged-e56ad835-6a7c-4744-884c-55e923967b3c for instance with vm_state building and task_state spawning. [ 1749.504516] env[62476]: DEBUG nova.compute.manager [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Received event network-changed-e56ad835-6a7c-4744-884c-55e923967b3c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1749.504786] env[62476]: DEBUG nova.compute.manager [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Refreshing instance network info cache due to event network-changed-e56ad835-6a7c-4744-884c-55e923967b3c. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1749.505079] env[62476]: DEBUG oslo_concurrency.lockutils [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] Acquiring lock "refresh_cache-e6b815fb-fa2d-4797-8810-c2b891f375cf" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.677137] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Updating instance_info_cache with network_info: [{"id": "e56ad835-6a7c-4744-884c-55e923967b3c", "address": "fa:16:3e:80:c7:0c", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56ad835-6a", "ovs_interfaceid": "e56ad835-6a7c-4744-884c-55e923967b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.690023] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "refresh_cache-e6b815fb-fa2d-4797-8810-c2b891f375cf" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.690303] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Instance network_info: |[{"id": 
"e56ad835-6a7c-4744-884c-55e923967b3c", "address": "fa:16:3e:80:c7:0c", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56ad835-6a", "ovs_interfaceid": "e56ad835-6a7c-4744-884c-55e923967b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1749.690663] env[62476]: DEBUG oslo_concurrency.lockutils [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] Acquired lock "refresh_cache-e6b815fb-fa2d-4797-8810-c2b891f375cf" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.690897] env[62476]: DEBUG nova.network.neutron [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Refreshing network info cache for port e56ad835-6a7c-4744-884c-55e923967b3c {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1749.692181] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:c7:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24376631-ee89-4ff1-b8ac-f09911fc8329', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e56ad835-6a7c-4744-884c-55e923967b3c', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1749.699636] env[62476]: DEBUG oslo.service.loopingcall [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1749.703277] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1749.703746] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03b3b0a5-2bc8-4304-a0a3-81fa72134891 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.724162] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1749.724162] env[62476]: value = "task-4319161" [ 1749.724162] env[62476]: _type = "Task" [ 1749.724162] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.733696] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319161, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.235312] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319161, 'name': CreateVM_Task, 'duration_secs': 0.355943} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.236242] env[62476]: DEBUG nova.network.neutron [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Updated VIF entry in instance network info cache for port e56ad835-6a7c-4744-884c-55e923967b3c. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1750.236611] env[62476]: DEBUG nova.network.neutron [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Updating instance_info_cache with network_info: [{"id": "e56ad835-6a7c-4744-884c-55e923967b3c", "address": "fa:16:3e:80:c7:0c", "network": {"id": "7951b440-c6fc-4447-b736-de183b5d8603", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1845317819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cdbe9b66c724475a673e94fdb118821", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56ad835-6a", "ovs_interfaceid": "e56ad835-6a7c-4744-884c-55e923967b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.237669] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1750.241029] env[62476]: DEBUG 
oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.241029] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.241029] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1750.241029] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b74cf9e-27b3-471e-973c-360978e0fe9a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.244588] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1750.244588] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52259fd0-a02f-f50d-d5d7-a71cc44ffb80" [ 1750.244588] env[62476]: _type = "Task" [ 1750.244588] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.249147] env[62476]: DEBUG oslo_concurrency.lockutils [req-25162044-f1fc-49e6-8698-2cc0954d60b9 req-0c671f62-5148-4915-af0c-b65bea81c2f6 service nova] Releasing lock "refresh_cache-e6b815fb-fa2d-4797-8810-c2b891f375cf" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.255831] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52259fd0-a02f-f50d-d5d7-a71cc44ffb80, 'name': SearchDatastore_Task} progress is 0%. 
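The Task returnval / "progress is N%" pairs above are oslo.vmware's task-polling protocol: invoke_api() issues the SOAP call and wait_for_task() polls the returned Task managed object until it succeeds or raises on a fault. A hedged sketch against the public oslo.vmware API (the endpoint, credentials, and vm_ref below are placeholders, not values from this deployment):

    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder endpoint/credentials
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(session, vm_ref):
        # vm_ref: a VirtualMachine managed-object reference obtained elsewhere.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)   # emits "progress is N%" while polling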
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.756289] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.756608] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1750.756750] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.708332] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.708716] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.027493] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.039265] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.039488] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.039659] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.039818] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1778.040962] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc374705-0e7b-4c99-97fb-462daec01eea {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.049952] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd2dc93-e18e-431e-a1ee-371d29ffb009 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.064315] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd219ef6-0093-499b-91a6-4b3fb2e6bdde {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.071206] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6f30f7-2039-441f-9830-7a2fb9def3c9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.101294] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180728MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1778.101464] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.101625] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.276046] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 6f133a49-bb62-45c6-a014-a2f99766d092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276046] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276046] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276046] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276281] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276317] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276455] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276572] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276687] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.276799] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.289160] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1778.301782] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1778.312852] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1778.324762] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1778.324762] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1778.324762] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1778.339341] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing inventories for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1778.354356] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating ProviderTree inventory for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1778.354556] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1778.365999] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing aggregate associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, aggregates: None {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1778.385135] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing trait associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1778.558975] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf359fd4-1f38-40d9-a12e-63e41a700eb5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.567044] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fc392f78-af09-4652-99aa-8077fc092de8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.597830] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aae00ad-4871-4c72-8751-2c048305697f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.605476] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ce78d7-5bb3-416f-a95b-728d2d39f2df {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.618649] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1778.627310] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1778.643318] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1778.643563] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.542s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.643282] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1781.643575] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1781.643612] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1781.667616] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Skipping network cache update for instance because it is Building. 
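For the inventory reported above, placement capacity per resource class is (total - reserved) * allocation_ratio, which is why 48 physical vCPUs at a 4.0 allocation ratio can back far more than 48 allocated VCPUs. Worked out for the values in the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0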
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.668158] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.668467] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.668733] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.669045] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.669330] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.670052] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.670052] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.670052] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.670052] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1781.670253] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
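The recurring "Running periodic task ComputeManager._..." entries in this stretch are driven by oslo.service's periodic_task machinery. A minimal sketch of that pattern, assuming only the public decorator API (the class name and spacing below are illustrative):

    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=10)
        def _heal_instance_info_cache(self, context):
            # Called roughly every `spacing` seconds by the service's
            # run_periodic_tasks() loop, producing the DEBUG lines above.
            pass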
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1782.027079] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.027250] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.027308] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1784.027632] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1784.027916] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances with incomplete migration {{(pid=62476) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1785.036916] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1786.027557] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1787.026976] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.023772] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.026469] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1794.634893] env[62476]: WARNING oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1794.634893] env[62476]: ERROR oslo_vmware.rw_handles [ 1794.635615] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1794.637541] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1794.637805] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Copying Virtual Disk [datastore1] vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/20f9fbc4-f080-473d-b20c-fe44173c4c48/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1794.638115] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16ea8c2d-b4a5-4cad-84cb-9b303962f105 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.646853] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1794.646853] env[62476]: value = "task-4319162" [ 1794.646853] env[62476]: _type = "Task" [ 1794.646853] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.656111] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': task-4319162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.026950] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.158070] env[62476]: DEBUG oslo_vmware.exceptions [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1795.158356] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.158919] env[62476]: ERROR nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1795.158919] env[62476]: Faults: ['InvalidArgument'] [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Traceback (most recent call last): [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] yield resources [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self.driver.spawn(context, instance, image_meta, [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self._fetch_image_if_missing(context, vi) [ 1795.158919] env[62476]: ERROR nova.compute.manager [instance: 
6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] image_cache(vi, tmp_image_ds_loc) [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] vm_util.copy_virtual_disk( [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] session._wait_for_task(vmdk_copy_task) [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] return self.wait_for_task(task_ref) [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] return evt.wait() [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] result = hub.switch() [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1795.159376] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] return self.greenlet.switch() [ 1795.159746] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1795.159746] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self.f(*self.args, **self.kw) [ 1795.159746] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1795.159746] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] raise exceptions.translate_fault(task_info.error) [ 1795.159746] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1795.159746] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Faults: ['InvalidArgument'] [ 1795.159746] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] [ 1795.159746] env[62476]: INFO nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 
tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Terminating instance [ 1795.161013] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.161246] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1795.161517] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a4a0733-b91b-45fa-8e0b-bb68eca145db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.164342] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1795.164586] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1795.165401] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12fa55d-a4fa-408e-9f84-7e9b9b5f5816 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.173825] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1795.174140] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3b1cb0a-1f1f-4a5b-93a7-1a9d33e9ec7d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.176827] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1795.177036] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Folder [datastore1] devstack-image-cache_base created. 
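Datastore paths in the MakeDirectory/SearchDatastore entries here follow the "[datastore] relative/path" convention. A tiny illustrative helper for building them (a simplification, not Nova's actual ds_util/DatastorePath implementation):

    def ds_path(datastore: str, *parts: str) -> str:
        # Builds '[datastore1] devstack-image-cache_base'-style path
        # strings, as seen in the directory-creation entries above.
        rel = '/'.join(p.strip('/') for p in parts if p)
        return f'[{datastore}] {rel}' if rel else f'[{datastore}]'

    assert ds_path('datastore1', 'devstack-image-cache_base') == \
        '[datastore1] devstack-image-cache_base'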
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1795.178162] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6768105-70b1-4729-a071-0f9e87f86b84 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.185206] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1795.185206] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52aeb3b9-4535-a53f-42bf-ce7a5436e852" [ 1795.185206] env[62476]: _type = "Task" [ 1795.185206] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.194408] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52aeb3b9-4535-a53f-42bf-ce7a5436e852, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.266666] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1795.266938] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1795.267060] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Deleting the datastore file [datastore1] 6f133a49-bb62-45c6-a014-a2f99766d092 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1795.267362] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-671963ee-dff5-44e6-824c-213ccee8adce {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.274749] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1795.274749] env[62476]: value = "task-4319164" [ 1795.274749] env[62476]: _type = "Task" [ 1795.274749] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.283635] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': task-4319164, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.697071] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1795.697493] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating directory with path [datastore1] vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1795.697610] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d040772f-2999-4f25-9ca4-6c0267ae60de {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.710154] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Created directory with path [datastore1] vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1795.710360] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Fetch image to [datastore1] vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1795.710534] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1795.711360] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fbee0d-accd-4fe7-8070-2e2de25cdde4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.719338] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a417e8-a7cb-4277-9a8a-6326dd861757 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.729460] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c1c1d5-c9c3-459e-8e33-bb559dc4893b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.762935] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-adc9f072-3bd5-48b6-a798-b9cf0dfd6806 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.770747] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-59718934-3c80-4205-9731-37bbb7cbaae6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.784444] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': task-4319164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086204} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.784689] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1795.784873] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1795.785059] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1795.785245] env[62476]: INFO nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Took 0.62 seconds to destroy the instance on the hypervisor. 
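The traceback at the top of this excerpt shows the shape of oslo.vmware's task handling: the driver submits CopyVirtualDisk_Task, then blocks in a polling loop that re-reads the task info until vCenter reports success or error; an error (here InvalidArgument on the fileType parameter) is translated into a VimFaultException and raised to the caller, which is how nova.compute.manager ends up logging the fault and rescheduling the build. Below is a minimal sketch of that poll-until-done pattern, assuming get_task_info returns an object with a state of 'running', 'success' or 'error' and an error carrying msg and faults; these names are illustrative stand-ins, not the real vSphere or oslo.vmware API.

    import time

    class TaskFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list    # e.g. ['InvalidArgument']

    def wait_for_task(get_task_info, interval=0.5, timeout=300):
        """Poll a task until it reaches a terminal state, then return or raise."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()            # one property-collector round trip
            if info.state == 'success':
                return info                   # task result for the caller
            if info.state == 'error':
                # Mirror exceptions.translate_fault(task_info.error): turn the
                # server-side fault into a local exception the driver can catch.
                raise TaskFault(info.error.msg, info.error.faults)
            time.sleep(interval)              # the "progress is 0%" lines above
        raise TimeoutError(f'task did not complete within {timeout}s')

In the real driver the loop runs on a green thread and the caller waits on an event, which is why eventlet's hub.switch() appears in the stack between wait_for_task and _poll_task.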
[ 1795.787554] env[62476]: DEBUG nova.compute.claims [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1795.787755] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.787926] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.794516] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1795.968254] env[62476]: DEBUG oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1796.027990] env[62476]: DEBUG oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1796.028200] env[62476]: DEBUG oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1796.032049] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.095418] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d088a4f4-e50f-4f7f-b83f-8d15fc95948a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.102675] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b69197d-f80e-464e-80f6-ac9570f35a51 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.135410] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6a4610-6aa3-4f6e-80bc-568fb32e6c6e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.144113] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703282de-37c3-4336-92b2-4c2bf5f3f71d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.158808] env[62476]: DEBUG nova.compute.provider_tree [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.169574] env[62476]: DEBUG nova.scheduler.client.report [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1796.185561] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.397s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.186229] env[62476]: ERROR nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1796.186229] env[62476]: Faults: ['InvalidArgument'] [ 1796.186229] 
env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Traceback (most recent call last): [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self.driver.spawn(context, instance, image_meta, [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self._fetch_image_if_missing(context, vi) [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] image_cache(vi, tmp_image_ds_loc) [ 1796.186229] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] vm_util.copy_virtual_disk( [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] session._wait_for_task(vmdk_copy_task) [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] return self.wait_for_task(task_ref) [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] return evt.wait() [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] result = hub.switch() [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] return self.greenlet.switch() [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1796.186711] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] self.f(*self.args, **self.kw) [ 1796.187110] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1796.187110] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] raise exceptions.translate_fault(task_info.error) [ 1796.187110] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1796.187110] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Faults: ['InvalidArgument'] [ 1796.187110] env[62476]: ERROR nova.compute.manager [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] [ 1796.187110] env[62476]: DEBUG nova.compute.utils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1796.188594] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Build of instance 6f133a49-bb62-45c6-a014-a2f99766d092 was re-scheduled: A specified parameter was not correct: fileType [ 1796.188594] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1796.188973] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1796.189174] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1796.189367] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1796.189536] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1796.584982] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.598867] env[62476]: INFO nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Took 0.41 seconds to deallocate network for instance. [ 1796.712988] env[62476]: INFO nova.scheduler.client.report [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Deleted allocations for instance 6f133a49-bb62-45c6-a014-a2f99766d092 [ 1796.737326] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "6f133a49-bb62-45c6-a014-a2f99766d092" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 617.015s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.738530] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "6f133a49-bb62-45c6-a014-a2f99766d092" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 420.007s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.738753] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "6f133a49-bb62-45c6-a014-a2f99766d092-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.738962] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "6f133a49-bb62-45c6-a014-a2f99766d092-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.739146] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "6f133a49-bb62-45c6-a014-a2f99766d092-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.741588] env[62476]: INFO nova.compute.manager [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Terminating instance [ 1796.743427] env[62476]: DEBUG nova.compute.manager [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1796.743618] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1796.744442] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c4125e8-2563-413c-954b-f7e9f571c202 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.753876] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b34c971-7300-425e-90eb-7d2309d9bb6a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.765320] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1796.788630] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f133a49-bb62-45c6-a014-a2f99766d092 could not be found. 
[ 1796.788911] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1796.789168] env[62476]: INFO nova.compute.manager [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1796.789521] env[62476]: DEBUG oslo.service.loopingcall [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.789778] env[62476]: DEBUG nova.compute.manager [-] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1796.789879] env[62476]: DEBUG nova.network.neutron [-] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1796.818783] env[62476]: DEBUG nova.network.neutron [-] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.827260] env[62476]: INFO nova.compute.manager [-] [instance: 6f133a49-bb62-45c6-a014-a2f99766d092] Took 0.04 seconds to deallocate network for instance. 
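Either side of this point the resource tracker takes the host-wide "compute_resources" lock twice: once to abort the failed instance's claim (returning its VCPU, MEMORY_MB and DISK_GB to the inventory reported to placement) and once, just below, to grant a fresh claim for cc61313f-d7db-4c5d-bb8e-1e516d2a89ce. The essential shape is a reservation that is rolled back if the build later fails; here is a toy sketch under that reading (the ResourceTracker class and its fields are illustrative, not Nova's real resource tracker API).

    import threading
    from contextlib import contextmanager

    class ResourceTracker:
        """Toy tracker: one lock guards the host's free resources."""

        def __init__(self, vcpus, memory_mb):
            self._lock = threading.Lock()   # plays the "compute_resources" role
            self.free = {'VCPU': vcpus, 'MEMORY_MB': memory_mb}

        @contextmanager
        def instance_claim(self, vcpus, memory_mb):
            want = {'VCPU': vcpus, 'MEMORY_MB': memory_mb}
            with self._lock:                # claim: check and reserve atomically
                if any(self.free[k] < v for k, v in want.items()):
                    raise RuntimeError('insufficient resources')
                for k, v in want.items():
                    self.free[k] -= v
            try:
                yield
            except Exception:
                with self._lock:            # abort_instance_claim on failure
                    for k, v in want.items():
                        self.free[k] += v
                raise

    # Usage mirroring the log: claim for an m1.nano (1 vCPU, 128 MB), fail the
    # spawn, and observe that aborting the claim returns the resources.
    rt = ResourceTracker(vcpus=48, memory_mb=196590)
    try:
        with rt.instance_claim(vcpus=1, memory_mb=128):
            raise RuntimeError('spawn failed')   # simulate the VimFaultException
    except RuntimeError:
        assert rt.free == {'VCPU': 48, 'MEMORY_MB': 196590}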
[ 1796.828247] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.828503] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.830108] env[62476]: INFO nova.compute.claims [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.925908] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b37686ab-76b6-4cc2-a24f-5e14a160f0b9 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "6f133a49-bb62-45c6-a014-a2f99766d092" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.066072] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06c8945-b0eb-400b-96c5-e866b5628b12 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.075146] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71642f9b-2c2b-4da3-a01e-6d52512a5a9d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.106603] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8199d8bc-55d6-4892-8c3e-2a1f33f05ae6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.115264] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f963de6-5d24-4daf-b744-167ed51a76df {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.129801] env[62476]: DEBUG nova.compute.provider_tree [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1797.140038] env[62476]: DEBUG nova.scheduler.client.report [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1797.154849] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.155202] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1797.192086] env[62476]: DEBUG nova.compute.utils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1797.193164] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1797.195762] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1797.204678] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1797.262022] env[62476]: DEBUG nova.policy [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9281b2dcb9c0440495b676e3291d6d92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1bc32d84f43a439396eacf3e9da5ad7d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1797.273094] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1797.300426] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1797.300682] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1797.300839] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1797.301030] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1797.301182] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1797.301332] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1797.301537] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1797.301696] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1797.301861] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 
tempest-ServersTestJSON-1286035361-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1797.302062] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1797.302640] env[62476]: DEBUG nova.virt.hardware [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1797.303531] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff6499d-fe01-4035-ae31-760ee954fa21 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.312118] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e748a0a-d88a-4780-b306-823e4f686e9f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.692746] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Successfully created port: 109ed530-9d6c-4e86-bedb-49ceea8ca032 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1798.636033] env[62476]: DEBUG nova.compute.manager [req-ef6b004f-32a9-45ce-be30-4213979c6e2c req-6ac65ebe-ad8d-42a5-98e8-6670e0295624 service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Received event network-vif-plugged-109ed530-9d6c-4e86-bedb-49ceea8ca032 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1798.636297] env[62476]: DEBUG oslo_concurrency.lockutils [req-ef6b004f-32a9-45ce-be30-4213979c6e2c req-6ac65ebe-ad8d-42a5-98e8-6670e0295624 service nova] Acquiring lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.636475] env[62476]: DEBUG oslo_concurrency.lockutils [req-ef6b004f-32a9-45ce-be30-4213979c6e2c req-6ac65ebe-ad8d-42a5-98e8-6670e0295624 service nova] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.636645] env[62476]: DEBUG oslo_concurrency.lockutils [req-ef6b004f-32a9-45ce-be30-4213979c6e2c req-6ac65ebe-ad8d-42a5-98e8-6670e0295624 service nova] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.636811] env[62476]: DEBUG nova.compute.manager [req-ef6b004f-32a9-45ce-be30-4213979c6e2c req-6ac65ebe-ad8d-42a5-98e8-6670e0295624 service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] 
No waiting events found dispatching network-vif-plugged-109ed530-9d6c-4e86-bedb-49ceea8ca032 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1798.636978] env[62476]: WARNING nova.compute.manager [req-ef6b004f-32a9-45ce-be30-4213979c6e2c req-6ac65ebe-ad8d-42a5-98e8-6670e0295624 service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Received unexpected event network-vif-plugged-109ed530-9d6c-4e86-bedb-49ceea8ca032 for instance with vm_state building and task_state spawning. [ 1798.788743] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Successfully updated port: 109ed530-9d6c-4e86-bedb-49ceea8ca032 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1798.806337] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "refresh_cache-cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.806554] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "refresh_cache-cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.806772] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1798.857181] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1799.047649] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Updating instance_info_cache with network_info: [{"id": "109ed530-9d6c-4e86-bedb-49ceea8ca032", "address": "fa:16:3e:99:87:ca", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap109ed530-9d", "ovs_interfaceid": "109ed530-9d6c-4e86-bedb-49ceea8ca032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1799.060229] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "refresh_cache-cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.060554] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Instance network_info: |[{"id": "109ed530-9d6c-4e86-bedb-49ceea8ca032", "address": "fa:16:3e:99:87:ca", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap109ed530-9d", "ovs_interfaceid": "109ed530-9d6c-4e86-bedb-49ceea8ca032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1799.060961] env[62476]: 
DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:87:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '109ed530-9d6c-4e86-bedb-49ceea8ca032', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1799.068896] env[62476]: DEBUG oslo.service.loopingcall [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1799.069675] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1799.069756] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-681ce1ad-2894-478d-886c-85860436b366 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.091180] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1799.091180] env[62476]: value = "task-4319165" [ 1799.091180] env[62476]: _type = "Task" [ 1799.091180] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.100202] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319165, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.604708] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319165, 'name': CreateVM_Task, 'duration_secs': 0.343577} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.604913] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1799.605658] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.605853] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.606195] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1799.606454] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d723f44f-62db-4ebd-a93a-95da1b4c8e0d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.611527] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 1799.611527] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52c994c8-dc39-d8c9-47a4-1c3e63eb902b" [ 1799.611527] env[62476]: _type = "Task" [ 1799.611527] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.620231] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52c994c8-dc39-d8c9-47a4-1c3e63eb902b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.123202] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.123629] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1800.123629] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.667801] env[62476]: DEBUG nova.compute.manager [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Received event network-changed-109ed530-9d6c-4e86-bedb-49ceea8ca032 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1800.668015] env[62476]: DEBUG nova.compute.manager [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Refreshing instance network info cache due to event network-changed-109ed530-9d6c-4e86-bedb-49ceea8ca032. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1800.668248] env[62476]: DEBUG oslo_concurrency.lockutils [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] Acquiring lock "refresh_cache-cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.668393] env[62476]: DEBUG oslo_concurrency.lockutils [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] Acquired lock "refresh_cache-cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.668553] env[62476]: DEBUG nova.network.neutron [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Refreshing network info cache for port 109ed530-9d6c-4e86-bedb-49ceea8ca032 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1800.986742] env[62476]: DEBUG nova.network.neutron [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Updated VIF entry in instance network info cache for port 109ed530-9d6c-4e86-bedb-49ceea8ca032. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1800.987105] env[62476]: DEBUG nova.network.neutron [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Updating instance_info_cache with network_info: [{"id": "109ed530-9d6c-4e86-bedb-49ceea8ca032", "address": "fa:16:3e:99:87:ca", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap109ed530-9d", "ovs_interfaceid": "109ed530-9d6c-4e86-bedb-49ceea8ca032", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.999168] env[62476]: DEBUG oslo_concurrency.lockutils [req-81c934ec-64d5-4b17-a1fd-866467911179 req-a979f8a6-b87f-494a-a0fd-8620aaf07e5b service nova] Releasing lock "refresh_cache-cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.027166] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1802.027515] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1802.039771] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] There are 0 instances to clean {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1814.402585] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_power_states {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1814.425498] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Getting list of instances from cluster (obj){ [ 1814.425498] env[62476]: value = "domain-c8" [ 1814.425498] env[62476]: _type = "ClusterComputeResource" [ 1814.425498] env[62476]: } {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1814.426927] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74c9637-6d71-4cbf-bf79-6723dfd87365 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.445046] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Got total of 10 instances {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1814.445303] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid a0490305-7494-4612-843f-bac04dd0f328 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.445448] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 139391d4-af04-4053-801a-792fc4fd724a {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.445615] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 003e332b-9765-4db7-9f48-40d33c6532d1 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.445780] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 27737774-efb5-4aee-a0c0-695e78a15dd6 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.445935] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid fe895d70-4c56-4854-83bf-a66cc1623d59 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.446105] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid c7e551af-a94e-48da-a725-53ebd73d43ee {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.446310] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid ea606214-a34b-4972-8948-a6ff8c55b889 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.446507] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid 1d67c106-ced2-4b4e-8abd-1652bd0509d1 {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.446743] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid e6b815fb-fa2d-4797-8810-c2b891f375cf {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.446964] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Triggering sync for uuid cc61313f-d7db-4c5d-bb8e-1e516d2a89ce {{(pid=62476) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1814.447379] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "a0490305-7494-4612-843f-bac04dd0f328" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.447559] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "139391d4-af04-4053-801a-792fc4fd724a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.447763] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "003e332b-9765-4db7-9f48-40d33c6532d1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.447959] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "27737774-efb5-4aee-a0c0-695e78a15dd6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.448169] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "fe895d70-4c56-4854-83bf-a66cc1623d59" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.449084] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "c7e551af-a94e-48da-a725-53ebd73d43ee" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.449084] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "ea606214-a34b-4972-8948-a6ff8c55b889" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.449084] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.449084] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.449231] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.840672] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.027682] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.039888] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.040174] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.040397] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.040559] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1838.041734] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccbf7de-7ef8-463d-984b-1fb33fa5e8a3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.050651] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfefbb9-68d0-4402-846b-9127818588ce {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.066408] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1930f24-b374-4ee7-ba81-e4b5904f507d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.073508] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7af217e-b03b-489f-b1ff-7a0d73751bf8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.102541] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180666MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1838.102541] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1838.102909] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.178240] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance a0490305-7494-4612-843f-bac04dd0f328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.178348] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 139391d4-af04-4053-801a-792fc4fd724a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.178475] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 003e332b-9765-4db7-9f48-40d33c6532d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.178610] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.178809] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.178948] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.179083] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.179203] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.179317] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.179429] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1838.190923] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1838.201717] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1838.211855] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
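
The tracker walks thirteen placement allocations here: ten instances actively managed on the host, plus three (9497c622..., 3462762c..., 2825c1dd...) that are scheduled but have yet to start, whose allocations are left alone. The totals it reports just below reconcile exactly with ten active instances of the per-instance allocation shown plus the 512 MB the inventory record reserves:

    # Reproducing the "Final resource view" figures reported in the next records.
    active_instances = 10
    per_instance = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}  # allocation per record
    reserved_ram_mb = 512                         # MEMORY_MB 'reserved' in the inventory

    used_vcpus = active_instances * per_instance["VCPU"]               # 10
    used_disk_gb = active_instances * per_instance["DISK_GB"]          # 10 GB
    used_ram_mb = reserved_ram_mb + active_instances * per_instance["MEMORY_MB"]

    assert (used_vcpus, used_disk_gb, used_ram_mb) == (10, 10, 1792)
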
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1838.212090] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1838.212244] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1838.384210] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9e3a57-8184-4a04-9e3d-a6c67a15e9ba {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.392244] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ccac25-9ed5-445e-b584-8cb1e543a183 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.423118] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad96eca1-b95d-493a-9868-4b490acd3bab {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.430713] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792ec1bc-8e67-4fd1-9f0b-bd515ae15a4c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.444756] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.456031] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1838.472891] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1838.473110] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.370s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.474452] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1842.474982] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1842.474982] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1842.500194] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.500367] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.500521] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.500673] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.500802] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.500927] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.501060] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.501184] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.501302] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Skipping network cache update for instance because it is Building. 
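
Every instance on the host is still in the Building state, so the cache-heal pass above degenerates into ten skips followed by "Didn't find any instances for network info cache update." in the next record. A condensed, assumed shape of that loop; the log messages are real, the control flow is paraphrased rather than Nova's exact code:

    from nova.compute import vm_states

    def heal_instance_info_cache(instances):
        candidates = []
        for instance in instances:
            if instance.vm_state == vm_states.BUILDING:
                # -> "Skipping network cache update for instance because it
                #    is Building."
                continue
            candidates.append(instance)
        if not candidates:
            # -> "Didn't find any instances for network info cache update."
            return
        # otherwise refresh one instance's network info cache per pass
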
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.501418] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1842.501536] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1843.026712] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.026915] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1843.129517] env[62476]: WARNING oslo_vmware.rw_handles [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1843.129517] env[62476]: ERROR oslo_vmware.rw_handles [ 1843.130334] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1843.132319] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Caching image 
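
The warning that opens the traceback above is raised in the write handle's close(): closing the upload calls getresponse() on the underlying connection, and the ESX side had already dropped it. The exception type is plain http.client behaviour; a self-contained reproduction using only the standard library, unrelated to the vCenter endpoints in the log:

    import http.client
    import socket
    import threading

    # A throwaway server that accepts one connection and closes it without
    # sending any response: the exact condition http.client reports as
    # RemoteDisconnected.
    srv = socket.socket()
    srv.bind(("127.0.0.1", 0))
    srv.listen(1)
    port = srv.getsockname()[1]
    threading.Thread(target=lambda: srv.accept()[0].close(), daemon=True).start()

    conn = http.client.HTTPConnection("127.0.0.1", port)
    conn.request("PUT", "/upload")
    try:
        conn.getresponse()
    except (http.client.RemoteDisconnected, ConnectionResetError) as exc:
        # RemoteDisconnected subclasses ConnectionResetError and BadStatusLine.
        print("caught:", exc)  # Remote end closed connection without response
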
{{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1843.132641] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Copying Virtual Disk [datastore1] vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/c835aa45-659f-4887-afb5-e532098c12ce/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1843.132956] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fbbafa4-0ed8-48cd-9d20-54dfc02bf52a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.142531] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1843.142531] env[62476]: value = "task-4319166" [ 1843.142531] env[62476]: _type = "Task" [ 1843.142531] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.150645] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': task-4319166, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.652359] env[62476]: DEBUG oslo_vmware.exceptions [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Fault InvalidArgument not matched. 
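
"Fault InvalidArgument not matched" is oslo.vmware's fault translator reporting that InvalidArgument maps to no specific exception class, so the task poller falls back to the generic VimFaultException raised in the traceback that follows. A caller on the same API sees it roughly like this; the session and task objects are assumed to exist, and the fault name is taken from the log:

    from oslo_vmware import exceptions as vmware_exceptions

    try:
        session.wait_for_task(vmdk_copy_task)
    except vmware_exceptions.VimFaultException as exc:
        # fault_list carries the vCenter fault names, here ['InvalidArgument'];
        # str(exc) renders the server message ("A specified parameter was not
        # correct: fileType").
        if "InvalidArgument" in (exc.fault_list or []):
            raise
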
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1843.652776] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.653245] env[62476]: ERROR nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1843.653245] env[62476]: Faults: ['InvalidArgument'] [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] Traceback (most recent call last): [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] yield resources [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self.driver.spawn(context, instance, image_meta, [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self._fetch_image_if_missing(context, vi) [ 1843.653245] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] image_cache(vi, tmp_image_ds_loc) [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] vm_util.copy_virtual_disk( [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] session._wait_for_task(vmdk_copy_task) [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] return self.wait_for_task(task_ref) [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] return evt.wait() [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] result = hub.switch() [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1843.653617] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] return self.greenlet.switch() [ 1843.653981] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1843.653981] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self.f(*self.args, **self.kw) [ 1843.653981] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1843.653981] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] raise exceptions.translate_fault(task_info.error) [ 1843.653981] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1843.653981] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] Faults: ['InvalidArgument'] [ 1843.653981] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] [ 1843.653981] env[62476]: INFO nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Terminating instance [ 1843.655161] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.655368] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1843.655609] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-43db6aa4-d6c8-439a-b12f-5e29a98b119a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.657768] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1843.657965] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1843.658695] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26531a7-92ae-4b0e-950a-29eb45afaa09 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.665956] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1843.666114] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0092cde-44b6-47c8-a3df-f056afd70a4b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.668623] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1843.668804] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1843.669797] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13920ec3-c37b-4889-bd47-fa38c405b915 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.675593] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1843.675593] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5272944f-0f47-10c6-bfb8-068f5ec56138" [ 1843.675593] env[62476]: _type = "Task" [ 1843.675593] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.684994] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5272944f-0f47-10c6-bfb8-068f5ec56138, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.744104] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1843.744377] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1843.744523] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Deleting the datastore file [datastore1] a0490305-7494-4612-843f-bac04dd0f328 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1843.744858] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9100fff-89e6-4a3a-9dd0-d22ae6be9860 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.753010] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for the task: (returnval){ [ 1843.753010] env[62476]: value = "task-4319168" [ 1843.753010] env[62476]: _type = "Task" [ 1843.753010] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.761207] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': task-4319168, 'name': DeleteDatastoreFile_Task} progress is 0%. 
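
The teardown above mixes oslo.vmware's two call styles: UnregisterVM is a plain synchronous invocation, while DeleteDatastoreFile_Task returns a Task reference that wait_for_task() polls, which is where the recurring "progress is 0%" records come from. A minimal sketch against the same API; the session and the vm_ref/dc_ref managed-object references are assumed, and the file path is the one being deleted in the record above:

    # "Invoking VirtualMachine.UnregisterVM": synchronous, no task to poll.
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)

    # "Invoking FileManager.DeleteDatastoreFile_Task": returns a task moref;
    # wait_for_task() polls it (the "_poll_task ... progress is N%" records)
    # and raises a translated fault on error.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, "DeleteDatastoreFile_Task", file_manager,
        name="[datastore1] a0490305-7494-4612-843f-bac04dd0f328",
        datacenter=dc_ref)
    session.wait_for_task(task)
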
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.028545] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1844.186815] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1844.186815] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating directory with path [datastore1] vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1844.186815] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b809ba95-a06c-405d-bce3-50b961291777 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.199014] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created directory with path [datastore1] vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1844.199227] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Fetch image to [datastore1] vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1844.199394] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1844.200147] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c330ea30-cf0c-4e00-b59d-fdba7f464fc4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.206778] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201594dc-1638-4180-ae1b-088da340623c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.216553] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b1f62fe2-958d-4c83-aec2-58eae249b4f7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.246099] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8929f0f6-2b32-4ac3-9dad-075d1b99bcb8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.252244] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-84be39a7-6dbb-4529-8e9e-43bdff55e21d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.261125] env[62476]: DEBUG oslo_vmware.api [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Task: {'id': task-4319168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073897} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.261357] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1844.261537] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1844.261706] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1844.261877] env[62476]: INFO nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1844.264090] env[62476]: DEBUG nova.compute.claims [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1844.264261] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.264475] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.276732] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1844.332397] env[62476]: DEBUG oslo_vmware.rw_handles [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1844.394778] env[62476]: DEBUG oslo_vmware.rw_handles [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1844.394976] env[62476]: DEBUG oslo_vmware.rw_handles [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
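
While the failed build is being unwound, another worker starts its own image fetch: the "Creating HTTP connection to write to file with size = 21318656" record is oslo.vmware opening a streaming PUT to the ESX datastore URL. A sketch of that step with the host, datacenter, datastore, path, and size taken from the record; the vCenter session cookies and the Glance-side data iterator are assumed, and the constructor arguments are inferred from the URL fields, so treat this as a sketch rather than the driver's exact call:

    from oslo_vmware import rw_handles

    # `cookies` (vCenter session cookies) and `image_iter` (the image data
    # iterator) are assumed to exist.
    handle = rw_handles.FileWriteHandle(
        "esx7c2n3.openstack.eu-de-1.cloud.sap", 443,
        "ha-datacenter", "datastore1", cookies,
        "vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/"
        "3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk",
        21318656)
    for chunk in image_iter:
        handle.write(chunk)
    handle.close()  # "Closing write handle for https://..."; reads the HTTP
                    # response, which is the step that produced the earlier
                    # RemoteDisconnected warning on another transfer
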
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1844.528607] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3d867c-9337-410c-a78c-ee5dca060c7f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.536759] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7d8414-3141-4ffd-9e37-b17ef8ae1618 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.566960] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1806f6bb-6714-4ea2-81fb-2450bc281a77 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.574836] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42c0989-95b8-46cf-8478-9f7c54683e0c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.588172] env[62476]: DEBUG nova.compute.provider_tree [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1844.597185] env[62476]: DEBUG nova.scheduler.client.report [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1844.612440] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.348s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.613015] env[62476]: ERROR nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1844.613015] env[62476]: Faults: ['InvalidArgument'] [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] Traceback (most recent call last): [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self.driver.spawn(context, instance, image_meta, [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self._fetch_image_if_missing(context, vi) [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] image_cache(vi, tmp_image_ds_loc) [ 1844.613015] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] vm_util.copy_virtual_disk( [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] session._wait_for_task(vmdk_copy_task) [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] return self.wait_for_task(task_ref) [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] return evt.wait() [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] result = hub.switch() [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] return self.greenlet.switch() [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1844.613416] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] self.f(*self.args, **self.kw) [ 1844.613860] env[62476]: ERROR nova.compute.manager [instance: 
a0490305-7494-4612-843f-bac04dd0f328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1844.613860] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] raise exceptions.translate_fault(task_info.error) [ 1844.613860] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1844.613860] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] Faults: ['InvalidArgument'] [ 1844.613860] env[62476]: ERROR nova.compute.manager [instance: a0490305-7494-4612-843f-bac04dd0f328] [ 1844.613860] env[62476]: DEBUG nova.compute.utils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1844.615320] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Build of instance a0490305-7494-4612-843f-bac04dd0f328 was re-scheduled: A specified parameter was not correct: fileType [ 1844.615320] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1844.615695] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1844.615867] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1844.616049] env[62476]: DEBUG nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1844.616247] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1845.007484] env[62476]: DEBUG nova.network.neutron [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.020747] env[62476]: INFO nova.compute.manager [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Took 0.40 seconds to deallocate network for instance. [ 1845.027375] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1845.126549] env[62476]: INFO nova.scheduler.client.report [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Deleted allocations for instance a0490305-7494-4612-843f-bac04dd0f328 [ 1845.150372] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c9df646a-6b46-41ee-8fcc-3a2398604d6b tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a0490305-7494-4612-843f-bac04dd0f328" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 665.399s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.151631] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a0490305-7494-4612-843f-bac04dd0f328" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 468.498s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.151947] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Acquiring lock "a0490305-7494-4612-843f-bac04dd0f328-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.152362] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a0490305-7494-4612-843f-bac04dd0f328-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.152362] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a0490305-7494-4612-843f-bac04dd0f328-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.156958] env[62476]: INFO nova.compute.manager [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Terminating instance [ 1845.159013] env[62476]: DEBUG nova.compute.manager [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1845.159238] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1845.159516] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da6ad047-3d2c-4e68-9eec-4173dfec4ab6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.163671] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1845.174109] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba34e00-a1f0-4c48-9dc4-542742834ff1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.207815] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a0490305-7494-4612-843f-bac04dd0f328 could not be found. 
[ 1845.208096] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1845.208344] env[62476]: INFO nova.compute.manager [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] [instance: a0490305-7494-4612-843f-bac04dd0f328] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1845.209199] env[62476]: DEBUG oslo.service.loopingcall [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.213111] env[62476]: DEBUG nova.compute.manager [-] [instance: a0490305-7494-4612-843f-bac04dd0f328] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1845.213211] env[62476]: DEBUG nova.network.neutron [-] [instance: a0490305-7494-4612-843f-bac04dd0f328] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1845.232921] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.233232] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.234773] env[62476]: INFO nova.compute.claims [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1845.252950] env[62476]: DEBUG nova.network.neutron [-] [instance: a0490305-7494-4612-843f-bac04dd0f328] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.271646] env[62476]: INFO nova.compute.manager [-] [instance: a0490305-7494-4612-843f-bac04dd0f328] Took 0.06 seconds to deallocate network for instance. 
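The VimFaultException traceback above funnels through a single oslo_vmware task-polling path: Nova submits CopyVirtualDisk_Task to vCenter, then blocks in wait_for_task until the task reaches a terminal state, and a server-side fault such as InvalidArgument ("A specified parameter was not correct: fileType") is translated into a client-side exception that unwinds back into _build_and_run_instance and triggers the claim abort and re-schedule seen here. A minimal, hypothetical sketch of that polling loop follows (the real implementation lives in oslo_vmware/api.py and runs inside an eventlet looping call; get_task_info is an assumed helper, not oslo.vmware API):

    import time

    class VimFaultException(Exception):
        """Client-side exception carrying vCenter fault names."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll the task's 'info' property until vCenter reports a
        # terminal state ('success' or 'error').
        while True:
            info = get_task_info(session, task_ref)  # assumed helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # e.g. fault_list=['InvalidArgument'] with message
                # "A specified parameter was not correct: fileType"
                raise VimFaultException(info.fault_list, info.error_message)
            time.sleep(poll_interval)
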
[ 1845.365930] env[62476]: DEBUG oslo_concurrency.lockutils [None req-a55db418-8e7c-49cb-86e5-b471c9f225e2 tempest-ListServersNegativeTestJSON-1938914885 tempest-ListServersNegativeTestJSON-1938914885-project-member] Lock "a0490305-7494-4612-843f-bac04dd0f328" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.214s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.367525] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "a0490305-7494-4612-843f-bac04dd0f328" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 30.920s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.367525] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: a0490305-7494-4612-843f-bac04dd0f328] During sync_power_state the instance has a pending task (deleting). Skip. [ 1845.367525] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "a0490305-7494-4612-843f-bac04dd0f328" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.463783] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b142aa6-83fd-4758-8183-c584addeed0a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.471827] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29543292-721c-4cd7-a2fe-83549da0bb27 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.501919] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddb5725-46e9-4382-ade3-98f99a38d439 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.510714] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba8da1b-abff-435b-935f-409ccb858063 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.527850] env[62476]: DEBUG nova.compute.provider_tree [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1845.537786] env[62476]: DEBUG nova.scheduler.client.report [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1845.552815] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.553425] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1845.595113] env[62476]: DEBUG nova.compute.utils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1845.596418] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1845.596564] env[62476]: DEBUG nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1845.607242] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1845.663645] env[62476]: DEBUG nova.policy [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84bfe931cb0149198cbfa760bda985fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69b584dd19a64cf7884d0302529d4de2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1845.672596] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1845.701519] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1845.701793] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1845.701951] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1845.702155] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1845.702307] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1845.702494] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1845.702900] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1845.703230] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1845.703550] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1845.703831] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1845.704189] env[62476]: DEBUG nova.virt.hardware [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1845.705205] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e1ff89-5f3b-4224-95fa-f1204c3ebed8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.713783] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1745d2e7-36c7-4ded-bccd-9860c9d68032 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.027162] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1846.062840] env[62476]: DEBUG nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Successfully created port: 4bd2793b-2b02-4c4b-9731-4d4f45d43734 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1846.785919] env[62476]: DEBUG 
nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Successfully updated port: 4bd2793b-2b02-4c4b-9731-4d4f45d43734 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1846.797203] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "refresh_cache-9497c622-7f14-4fc2-ac24-d611897a8be9" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.797419] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired lock "refresh_cache-9497c622-7f14-4fc2-ac24-d611897a8be9" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.797508] env[62476]: DEBUG nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1846.848718] env[62476]: DEBUG nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1847.033360] env[62476]: DEBUG nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Updating instance_info_cache with network_info: [{"id": "4bd2793b-2b02-4c4b-9731-4d4f45d43734", "address": "fa:16:3e:d9:0a:e2", "network": {"id": "541ae4a8-5066-487b-9521-ddc3013eecf0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-226251912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69b584dd19a64cf7884d0302529d4de2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd2793b-2b", "ovs_interfaceid": "4bd2793b-2b02-4c4b-9731-4d4f45d43734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.044597] env[62476]: DEBUG nova.compute.manager [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Received event network-vif-plugged-4bd2793b-2b02-4c4b-9731-4d4f45d43734 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1847.044826] env[62476]: DEBUG oslo_concurrency.lockutils [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] Acquiring lock "9497c622-7f14-4fc2-ac24-d611897a8be9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.045097] env[62476]: DEBUG oslo_concurrency.lockutils [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] Lock "9497c622-7f14-4fc2-ac24-d611897a8be9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.045214] env[62476]: DEBUG oslo_concurrency.lockutils [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] Lock "9497c622-7f14-4fc2-ac24-d611897a8be9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.045381] env[62476]: DEBUG nova.compute.manager [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] No waiting events found dispatching network-vif-plugged-4bd2793b-2b02-4c4b-9731-4d4f45d43734 
{{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1847.045547] env[62476]: WARNING nova.compute.manager [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Received unexpected event network-vif-plugged-4bd2793b-2b02-4c4b-9731-4d4f45d43734 for instance with vm_state building and task_state spawning. [ 1847.045715] env[62476]: DEBUG nova.compute.manager [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Received event network-changed-4bd2793b-2b02-4c4b-9731-4d4f45d43734 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1847.045854] env[62476]: DEBUG nova.compute.manager [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Refreshing instance network info cache due to event network-changed-4bd2793b-2b02-4c4b-9731-4d4f45d43734. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1847.046066] env[62476]: DEBUG oslo_concurrency.lockutils [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] Acquiring lock "refresh_cache-9497c622-7f14-4fc2-ac24-d611897a8be9" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.047582] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Releasing lock "refresh_cache-9497c622-7f14-4fc2-ac24-d611897a8be9" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.047853] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Instance network_info: |[{"id": "4bd2793b-2b02-4c4b-9731-4d4f45d43734", "address": "fa:16:3e:d9:0a:e2", "network": {"id": "541ae4a8-5066-487b-9521-ddc3013eecf0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-226251912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69b584dd19a64cf7884d0302529d4de2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd2793b-2b", "ovs_interfaceid": "4bd2793b-2b02-4c4b-9731-4d4f45d43734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1847.048355] env[62476]: DEBUG oslo_concurrency.lockutils [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 
req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] Acquired lock "refresh_cache-9497c622-7f14-4fc2-ac24-d611897a8be9" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.048537] env[62476]: DEBUG nova.network.neutron [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Refreshing network info cache for port 4bd2793b-2b02-4c4b-9731-4d4f45d43734 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1847.049608] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:0a:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bd2793b-2b02-4c4b-9731-4d4f45d43734', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1847.057330] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating folder: Project (69b584dd19a64cf7884d0302529d4de2). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1847.058424] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-293d51ab-2bfc-4238-b7bc-4e9574926f47 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.072662] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Created folder: Project (69b584dd19a64cf7884d0302529d4de2) in parent group-v849485. [ 1847.072901] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating folder: Instances. Parent ref: group-v849573. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1847.073154] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a15c6b77-d567-4575-be3f-0c237cea7fc6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.082817] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Created folder: Instances in parent group-v849573. [ 1847.083083] env[62476]: DEBUG oslo.service.loopingcall [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1847.083276] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1847.083486] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2a4c718-3358-477e-9948-179cb618a6c2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.106791] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1847.106791] env[62476]: value = "task-4319171" [ 1847.106791] env[62476]: _type = "Task" [ 1847.106791] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.115419] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319171, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.617156] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319171, 'name': CreateVM_Task, 'duration_secs': 0.328444} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.617341] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1847.618016] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.618193] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.618520] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1847.618774] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19c5129c-47fe-4f88-89d6-1f7d45ee9835 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.621455] env[62476]: DEBUG nova.network.neutron [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Updated VIF entry in instance network info cache for port 4bd2793b-2b02-4c4b-9731-4d4f45d43734. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1847.621771] env[62476]: DEBUG nova.network.neutron [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Updating instance_info_cache with network_info: [{"id": "4bd2793b-2b02-4c4b-9731-4d4f45d43734", "address": "fa:16:3e:d9:0a:e2", "network": {"id": "541ae4a8-5066-487b-9521-ddc3013eecf0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-226251912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69b584dd19a64cf7884d0302529d4de2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bd2793b-2b", "ovs_interfaceid": "4bd2793b-2b02-4c4b-9731-4d4f45d43734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.624027] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){ [ 1847.624027] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52a226a7-12bf-98ea-0384-d4a81ee00c8d" [ 1847.624027] env[62476]: _type = "Task" [ 1847.624027] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.631977] env[62476]: DEBUG oslo_concurrency.lockutils [req-ceaf5143-eef9-4917-a9b4-824eb319cf02 req-e66a0514-a5e8-4298-976f-26e35ac3b5dd service nova] Releasing lock "refresh_cache-9497c622-7f14-4fc2-ac24-d611897a8be9" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.635189] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52a226a7-12bf-98ea-0384-d4a81ee00c8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.134468] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.134832] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1848.134885] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.027350] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.022665] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.026313] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1862.750327] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "8fdd45f2-0c21-461f-896e-698182bd5337" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.750737] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "8fdd45f2-0c21-461f-896e-698182bd5337" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.209024] env[62476]: WARNING oslo_vmware.rw_handles [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1890.209024] 
env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1890.209024] env[62476]: ERROR oslo_vmware.rw_handles [ 1890.209892] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1890.211683] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1890.211936] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Copying Virtual Disk [datastore1] vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/9b7af4fe-93e0-4289-a281-e65cb515c8a7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1890.212263] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5a5eb18-8eda-4dfb-918e-9c1f13a60a41 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.221128] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1890.221128] env[62476]: value = "task-4319172" [ 1890.221128] env[62476]: _type = "Task" [ 1890.221128] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.229455] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319172, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.731649] env[62476]: DEBUG oslo_vmware.exceptions [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1890.731942] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1890.732539] env[62476]: ERROR nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1890.732539] env[62476]: Faults: ['InvalidArgument'] [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] Traceback (most recent call last): [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] yield resources [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self.driver.spawn(context, instance, image_meta, [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self._fetch_image_if_missing(context, vi) [ 1890.732539] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] image_cache(vi, tmp_image_ds_loc) [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 
139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] vm_util.copy_virtual_disk( [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] session._wait_for_task(vmdk_copy_task) [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] return self.wait_for_task(task_ref) [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] return evt.wait() [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] result = hub.switch() [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1890.732937] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] return self.greenlet.switch() [ 1890.733625] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1890.733625] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self.f(*self.args, **self.kw) [ 1890.733625] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1890.733625] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] raise exceptions.translate_fault(task_info.error) [ 1890.733625] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1890.733625] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] Faults: ['InvalidArgument'] [ 1890.733625] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] [ 1890.733625] env[62476]: INFO nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Terminating instance [ 1890.734508] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.734726] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1890.734959] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d43fec2d-1b3a-4f54-9ee6-bd8fbf6521f2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.737148] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1890.737349] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1890.738088] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa77abf9-95cd-4131-ac3e-2ebd05e1f146 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.744994] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1890.745227] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a29c73e-844d-4034-b2e2-d84a70cbc8fc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.747446] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1890.747614] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1890.748563] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f99538bc-2ecd-46b2-9424-311277e30e98 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.753505] env[62476]: DEBUG oslo_vmware.api [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Waiting for the task: (returnval){ [ 1890.753505] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]5273139c-f3df-4165-bed0-e8464950788e" [ 1890.753505] env[62476]: _type = "Task" [ 1890.753505] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.765083] env[62476]: DEBUG oslo_vmware.api [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]5273139c-f3df-4165-bed0-e8464950788e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.809799] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1890.810026] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1890.810219] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleting the datastore file [datastore1] 139391d4-af04-4053-801a-792fc4fd724a {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.810505] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a297783c-3b7a-4976-aea9-0e59b6135460 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.816824] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 1890.816824] env[62476]: value = "task-4319174" [ 1890.816824] env[62476]: _type = "Task" [ 1890.816824] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.825031] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319174, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.264766] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1891.265158] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Creating directory with path [datastore1] vmware_temp/3241fc56-39aa-4cca-b175-5de8beabf584/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.265281] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c408fd8-9b12-49e4-b64c-a99188617fa3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.277892] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Created directory with path [datastore1] vmware_temp/3241fc56-39aa-4cca-b175-5de8beabf584/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1891.278136] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Fetch image to [datastore1] vmware_temp/3241fc56-39aa-4cca-b175-5de8beabf584/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1891.278255] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/3241fc56-39aa-4cca-b175-5de8beabf584/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1891.279089] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729a3a82-9e5e-4b15-bcfc-897cf9fe05ca {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.286315] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4499ce96-8bd2-448b-ba6b-ae2df2331a8d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.295737] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7960a27-165d-4fc6-ae91-a2ab5edd8764 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.330041] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c98addf-2be2-44a0-a04a-736dc219205e {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.337545] env[62476]: DEBUG oslo_vmware.api [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094072} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.339106] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.339306] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1891.339478] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1891.339675] env[62476]: INFO nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1891.341533] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4ec7e3d4-13cc-4cea-a4c0-90bae2443b1f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.343494] env[62476]: DEBUG nova.compute.claims [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1891.343667] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.343877] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.366736] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1891.519512] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.520356] env[62476]: ERROR nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. 
[ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Traceback (most recent call last): [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] result = getattr(controller, method)(*args, **kwargs) [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._get(image_id) [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1891.520356] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] resp, body = self.http_client.get(url, headers=header) [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.request(url, 'GET', **kwargs) [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._handle_response(resp) [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise exc.from_response(resp, resp.content) [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] During handling of the above exception, another exception occurred: [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1891.521053] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Traceback (most recent call last): [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] yield resources [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self.driver.spawn(context, instance, image_meta, [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._fetch_image_if_missing(context, vi) [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] image_fetch(context, vi, tmp_image_ds_loc) [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] images.fetch_image( [ 1891.521609] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] metadata = IMAGE_API.get(context, image_ref) [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return session.show(context, image_id, [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] _reraise_translated_image_exception(image_id) [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise new_exc.with_traceback(exc_trace) [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] result = getattr(controller, method)(*args, **kwargs) [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1891.522078] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._get(image_id) [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] resp, body = self.http_client.get(url, headers=header) [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.request(url, 'GET', **kwargs) [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._handle_response(resp) [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise exc.from_response(resp, resp.content) [ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. 
[ 1891.522434] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1891.522868] env[62476]: INFO nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Terminating instance [ 1891.522868] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.522868] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.523238] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.523418] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquired lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.523611] env[62476]: DEBUG nova.network.neutron [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1891.527872] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a3add79-ca8a-4d49-83fd-5299f6e4e07c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.537060] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1891.537247] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1891.538227] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30bc62e5-2bec-4b5f-9f61-b1e408308c76 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.547033] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 1891.547033] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52865f14-c69a-2fd4-e772-88bcee6870b8" [ 1891.547033] env[62476]: _type = "Task" [ 1891.547033] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.554696] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52865f14-c69a-2fd4-e772-88bcee6870b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.557519] env[62476]: DEBUG nova.network.neutron [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1891.581893] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa5a481-4a75-4d9b-a16f-b8ac78dd1578 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.589878] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9908908-c2bf-41e5-9a7a-4e2ad3919268 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.621896] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9649cdd9-9aa7-4abb-b985-8544bb3f4edb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.629966] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca96be4d-a3c3-4962-a50d-6aa73975b655 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.634569] env[62476]: DEBUG nova.network.neutron [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.645646] env[62476]: DEBUG nova.compute.provider_tree [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1891.647363] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Releasing lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.647732] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1891.647919] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1891.649057] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c028cfe1-9522-412b-9a00-d12aa5962d23 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.656684] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1891.657471] env[62476]: DEBUG nova.scheduler.client.report [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1891.660241] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ea46daa-cc13-4249-82ca-c8ba4c4cb700 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.671775] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.672332] env[62476]: ERROR nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.672332] 
env[62476]: Faults: ['InvalidArgument'] [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] Traceback (most recent call last): [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self.driver.spawn(context, instance, image_meta, [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self._fetch_image_if_missing(context, vi) [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] image_cache(vi, tmp_image_ds_loc) [ 1891.672332] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] vm_util.copy_virtual_disk( [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] session._wait_for_task(vmdk_copy_task) [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] return self.wait_for_task(task_ref) [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] return evt.wait() [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] result = hub.switch() [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] return self.greenlet.switch() [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 
139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1891.672755] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] self.f(*self.args, **self.kw) [ 1891.673400] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1891.673400] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] raise exceptions.translate_fault(task_info.error) [ 1891.673400] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.673400] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] Faults: ['InvalidArgument'] [ 1891.673400] env[62476]: ERROR nova.compute.manager [instance: 139391d4-af04-4053-801a-792fc4fd724a] [ 1891.673400] env[62476]: DEBUG nova.compute.utils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1891.674619] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Build of instance 139391d4-af04-4053-801a-792fc4fd724a was re-scheduled: A specified parameter was not correct: fileType [ 1891.674619] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1891.675012] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1891.675209] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1891.675386] env[62476]: DEBUG nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1891.675550] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1891.701691] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1891.701691] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1891.701914] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Deleting the datastore file [datastore1] 003e332b-9765-4db7-9f48-40d33c6532d1 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1891.703048] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64bc1b60-df0e-47c7-bbae-b1e935840aac {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.709673] env[62476]: DEBUG oslo_vmware.api [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Waiting for the task: (returnval){ [ 1891.709673] env[62476]: value = "task-4319176" [ 1891.709673] env[62476]: _type = "Task" [ 1891.709673] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.717607] env[62476]: DEBUG oslo_vmware.api [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Task: {'id': task-4319176, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.059226] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1892.059500] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Creating directory with path [datastore1] vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1892.060290] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9013738-f41d-4e45-9c39-d1423c2bea6d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.076249] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Created directory with path [datastore1] vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1892.076472] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Fetch image to [datastore1] vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1892.076697] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1892.077629] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a161974-259f-4dc7-ab63-18218dfd69bf {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.085192] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f048adf3-41c0-4fe2-8cc4-342c8480acfa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.094746] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e3bcec-7faa-463f-965b-e8e400ce485f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.126823] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea0505e-ee43-45ff-a679-1bc2abcadb53 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.133960] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dffd2760-1f3e-4179-9f1c-e268a65ddf59 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.156327] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1892.212203] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1892.279150] env[62476]: DEBUG oslo_vmware.api [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Task: {'id': task-4319176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042513} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.280084] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1892.280284] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1892.280454] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1892.280622] env[62476]: INFO nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1892.280871] env[62476]: DEBUG oslo.service.loopingcall [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.281340] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1892.281876] env[62476]: DEBUG oslo_vmware.rw_handles [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1892.281876] env[62476]: DEBUG nova.compute.manager [-] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network deallocation for instance since networking was not requested. {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1892.284273] env[62476]: DEBUG nova.compute.claims [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1892.284816] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.284816] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.288178] env[62476]: DEBUG nova.network.neutron [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.300772] env[62476]: INFO nova.compute.manager [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Took 0.63 seconds to deallocate network for instance. 
[ 1892.411664] env[62476]: INFO nova.scheduler.client.report [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleted allocations for instance 139391d4-af04-4053-801a-792fc4fd724a [ 1892.435836] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd3cd06e-0973-479b-9948-3ff46ece2271 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "139391d4-af04-4053-801a-792fc4fd724a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 663.179s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.437025] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "139391d4-af04-4053-801a-792fc4fd724a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 467.489s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.437259] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "139391d4-af04-4053-801a-792fc4fd724a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.437471] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "139391d4-af04-4053-801a-792fc4fd724a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.437833] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "139391d4-af04-4053-801a-792fc4fd724a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.439884] env[62476]: INFO nova.compute.manager [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Terminating instance [ 1892.441647] env[62476]: DEBUG nova.compute.manager [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1892.441849] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1892.445086] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5ded344-a7f2-4198-812e-e86ef9d5e9ca {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.457668] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720b0021-7f2e-40d1-8a2e-f353a8d5d14b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.475713] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1892.489628] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 139391d4-af04-4053-801a-792fc4fd724a could not be found. [ 1892.489865] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1892.490021] env[62476]: INFO nova.compute.manager [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1892.490268] env[62476]: DEBUG oslo.service.loopingcall [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.493033] env[62476]: DEBUG nova.compute.manager [-] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1892.493213] env[62476]: DEBUG nova.network.neutron [-] [instance: 139391d4-af04-4053-801a-792fc4fd724a] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1892.522826] env[62476]: DEBUG nova.network.neutron [-] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.530930] env[62476]: INFO nova.compute.manager [-] [instance: 139391d4-af04-4053-801a-792fc4fd724a] Took 0.04 seconds to deallocate network for instance. [ 1892.539020] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.575783] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab9579e-e106-429a-ae3b-440d0fcd8828 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.584553] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d233b65-0e2e-4e5d-8847-cef87ca4244c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.619208] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fd34c1-4841-40f4-adc4-d677d672be61 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.629973] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43c01ff-596d-4b0b-a3fb-33a843f9e3d1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.643667] env[62476]: DEBUG nova.compute.provider_tree [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.652772] env[62476]: DEBUG nova.scheduler.client.report [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.671925] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.387s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.672271] env[62476]: ERROR nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Traceback (most recent call last): [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] result = getattr(controller, method)(*args, **kwargs) [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._get(image_id) [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1892.672271] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] resp, body = self.http_client.get(url, headers=header) [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.request(url, 'GET', **kwargs) [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._handle_response(resp) [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise exc.from_response(resp, resp.content) [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] During handling of the above exception, another exception occurred: [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1892.672712] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Traceback (most recent call last): [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self.driver.spawn(context, instance, image_meta, [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._fetch_image_if_missing(context, vi) [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] image_fetch(context, vi, tmp_image_ds_loc) [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] images.fetch_image( [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] metadata = IMAGE_API.get(context, image_ref) [ 1892.673114] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return session.show(context, image_id, [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 
003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] _reraise_translated_image_exception(image_id) [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise new_exc.with_traceback(exc_trace) [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] result = getattr(controller, method)(*args, **kwargs) [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._get(image_id) [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1892.673463] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] resp, body = self.http_client.get(url, headers=header) [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.request(url, 'GET', **kwargs) [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self._handle_response(resp) [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise exc.from_response(resp, resp.content) [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] nova.exception.ImageNotAuthorized: Not authorized for image 
3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. [ 1892.673808] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1892.673808] env[62476]: DEBUG nova.compute.utils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1892.674263] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.135s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.675696] env[62476]: INFO nova.compute.claims [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1892.679783] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Build of instance 003e332b-9765-4db7-9f48-40d33c6532d1 was re-scheduled: Not authorized for image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7. {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1892.680288] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1892.680517] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.680666] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquired lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.680827] env[62476]: DEBUG nova.network.neutron [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1892.684169] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15edfb5e-aff8-4200-9e33-ae40e1629f6f tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "139391d4-af04-4053-801a-792fc4fd724a" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.247s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.685256] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "139391d4-af04-4053-801a-792fc4fd724a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 78.238s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.685477] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 139391d4-af04-4053-801a-792fc4fd724a] During sync_power_state the instance has a pending task (deleting). Skip. [ 1892.685656] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "139391d4-af04-4053-801a-792fc4fd724a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.706258] env[62476]: DEBUG nova.network.neutron [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1892.780569] env[62476]: DEBUG nova.network.neutron [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.792056] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Releasing lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.792641] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1892.792641] env[62476]: DEBUG nova.compute.manager [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1892.881305] env[62476]: INFO nova.scheduler.client.report [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Deleted allocations for instance 003e332b-9765-4db7-9f48-40d33c6532d1 [ 1892.888635] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfff39c3-c383-4eec-86ce-5b8e6af1f149 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.897866] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6510842d-bc04-4465-99c1-edc9647a3bcc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.903167] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d1229465-0411-4de8-946e-9a5a6d73bc8b tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "003e332b-9765-4db7-9f48-40d33c6532d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 620.569s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.906013] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "003e332b-9765-4db7-9f48-40d33c6532d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 423.564s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.906258] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "003e332b-9765-4db7-9f48-40d33c6532d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.906490] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "003e332b-9765-4db7-9f48-40d33c6532d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.906691] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "003e332b-9765-4db7-9f48-40d33c6532d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.933602] env[62476]: INFO nova.compute.manager [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Terminating instance [ 1892.935354] env[62476]: DEBUG nova.compute.manager [None 
req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1892.938331] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcaa851b-3a31-4d36-90a4-8d7a711adc9c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.941762] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquiring lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.941925] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Acquired lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.942145] env[62476]: DEBUG nova.network.neutron [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1892.948785] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3a06a9-9a9f-48c9-acb4-83cd68198922 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.967222] env[62476]: DEBUG nova.compute.provider_tree [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.973275] env[62476]: DEBUG nova.network.neutron [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1892.977788] env[62476]: DEBUG nova.scheduler.client.report [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.990854] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.991957] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.992437] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1892.996687] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.006s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.998058] env[62476]: INFO nova.compute.claims [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1893.030935] env[62476]: DEBUG nova.compute.utils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.032811] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Allocating IP information in the background. 
{{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1893.032811] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1893.043830] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1893.053929] env[62476]: DEBUG nova.network.neutron [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.061395] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Releasing lock "refresh_cache-003e332b-9765-4db7-9f48-40d33c6532d1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.061782] env[62476]: DEBUG nova.compute.manager [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1893.061969] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1893.062503] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4523ffb0-ad35-4f78-ac3b-a03c069165e0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.076541] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ce49f2-45f4-4f07-921e-4e71dfde9ae4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.094805] env[62476]: DEBUG nova.policy [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84bfe931cb0149198cbfa760bda985fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69b584dd19a64cf7884d0302529d4de2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1893.114573] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 003e332b-9765-4db7-9f48-40d33c6532d1 could not be found. [ 1893.114786] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1893.114964] env[62476]: INFO nova.compute.manager [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1893.115581] env[62476]: DEBUG oslo.service.loopingcall [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.117938] env[62476]: DEBUG nova.compute.manager [-] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1893.118048] env[62476]: DEBUG nova.network.neutron [-] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1893.123211] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1893.155134] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1893.155134] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1893.155134] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1893.155381] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1893.155381] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1893.155782] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1893.156299] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1893.156299] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1893.156420] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1893.156542] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1893.156814] env[62476]: DEBUG nova.virt.hardware [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1893.157807] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5419e98f-a937-46c4-a07f-1f99c1824b5b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.168836] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa39be37-1d61-4ee0-99db-175e37a01730 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.247117] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8136ad-35a3-41d3-95f4-e55e94e5d99b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.255370] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fcc249-bbe3-4740-b001-61b4ef14dee5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.285971] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0d0451-c766-40b8-ad09-fb38d15a24b8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.289169] env[62476]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=62476) 
_handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1893.289422] env[62476]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-c4724afc-fcd9-4d1e-b186-ac9041a422e7'] [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1893.289940] env[62476]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1893.290493] env[62476]: 
ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1893.290493] env[62476]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1893.291376] env[62476]: ERROR oslo.service.loopingcall [ 1893.291807] env[62476]: ERROR nova.compute.manager [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Failed to deallocate network for instance. 
Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1893.298511] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7ef301-2ea9-4a52-999e-1e1878ad458c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.316189] env[62476]: DEBUG nova.compute.provider_tree [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1893.328689] env[62476]: DEBUG nova.scheduler.client.report [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1893.334413] env[62476]: ERROR nova.compute.manager [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Traceback (most recent call last): [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] ret = obj(*args, **kwargs) [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] exception_handler_v20(status_code, error_body) [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise client_exc(message=error_message, [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Neutron server returns request_ids: ['req-c4724afc-fcd9-4d1e-b186-ac9041a422e7'] [ 1893.334413] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] During handling of the above exception, another exception occurred: [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Traceback (most recent call last): [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._delete_instance(context, instance, bdms) [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._shutdown_instance(context, instance, bdms) [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._try_deallocate_network(context, instance, requested_networks) [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] with excutils.save_and_reraise_exception(): [ 1893.336147] env[62476]: ERROR 
nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1893.336147] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self.force_reraise() [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise self.value [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] _deallocate_network_with_retries() [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return evt.wait() [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] result = hub.switch() [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.greenlet.switch() [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1893.336505] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] result = func(*self.args, **self.kw) [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] result = f(*args, **kwargs) [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._deallocate_network( [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self.network_api.deallocate_for_instance( [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 
003e332b-9765-4db7-9f48-40d33c6532d1] data = neutron.list_ports(**search_opts) [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] ret = obj(*args, **kwargs) [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.list('ports', self.ports_path, retrieve_all, [ 1893.336911] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] ret = obj(*args, **kwargs) [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] for r in self._pagination(collection, path, **params): [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] res = self.get(path, params=params) [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] ret = obj(*args, **kwargs) [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.retry_request("GET", action, body=body, [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] ret = obj(*args, **kwargs) [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1893.337277] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] return self.do_request(method, action, body=body, [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] ret = obj(*args, **kwargs) [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] self._handle_fault_response(status_code, replybody, resp) [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1893.337649] env[62476]: ERROR nova.compute.manager [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] [ 1893.346240] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.349s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.346797] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1893.369777] env[62476]: DEBUG oslo_concurrency.lockutils [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Lock "003e332b-9765-4db7-9f48-40d33c6532d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.463s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.369777] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "003e332b-9765-4db7-9f48-40d33c6532d1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 78.920s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.369777] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1893.369777] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "003e332b-9765-4db7-9f48-40d33c6532d1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.387989] env[62476]: DEBUG nova.compute.utils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.389394] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Not allocating networking since 'none' was specified. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1893.419736] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1893.462194] env[62476]: INFO nova.compute.manager [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] [instance: 003e332b-9765-4db7-9f48-40d33c6532d1] Successfully reverted task state from None on failure for instance. [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server [None req-92631b83-6d9f-43c3-95fc-e46d9e94f4da tempest-ServerShowV247Test-230695872 tempest-ServerShowV247Test-230695872-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-c4724afc-fcd9-4d1e-b186-ac9041a422e7'] [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1893.465899] env[62476]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1893.466934] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1893.467919] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server raise self.value [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1893.468716] env[62476]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1893.469381] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.469381] env[62476]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1893.469861] env[62476]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1893.470891] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1893.470891] env[62476]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1893.470891] env[62476]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1893.470891] env[62476]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1893.470891] env[62476]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1893.470891] env[62476]: ERROR oslo_messaging.rpc.server [ 1893.520452] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1893.568897] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1893.569383] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1893.569561] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1893.569748] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1893.569896] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1893.570055] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1893.570282] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1893.570459] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1893.571975] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 
tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1893.571975] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1893.571975] env[62476]: DEBUG nova.virt.hardware [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1893.572203] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b02b36-c07f-47ac-8512-a6f0a724bad3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.580977] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Successfully created port: 8366dcad-61a8-4a54-b732-1cf284f0a4ed {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1893.583917] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c27967-09e4-4cec-a927-96b7c1aa063f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.600124] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance VIF info [] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1893.605978] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Creating folder: Project (86ee024d92de45f1be3adc5bae43da80). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1893.606351] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a14b4b0b-619a-4200-abc6-57caf71acbed {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.618052] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Created folder: Project (86ee024d92de45f1be3adc5bae43da80) in parent group-v849485. [ 1893.618269] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Creating folder: Instances. Parent ref: group-v849576. 
{{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1893.618524] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d84a896-99cb-418d-ad29-e30fbab1c620 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.628858] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Created folder: Instances in parent group-v849576. [ 1893.629115] env[62476]: DEBUG oslo.service.loopingcall [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.629459] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1893.629614] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d71e482-1d4a-4d39-8a6c-97ccda6c60c4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.651020] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1893.651020] env[62476]: value = "task-4319179" [ 1893.651020] env[62476]: _type = "Task" [ 1893.651020] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.657192] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319179, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.160067] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319179, 'name': CreateVM_Task, 'duration_secs': 0.296897} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.160348] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1894.160758] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.160923] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.161294] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1894.161562] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdb1506a-51f9-4d77-89ff-939a330f202a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.167128] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Waiting for the task: (returnval){ [ 1894.167128] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52b77096-24e7-e739-8d39-b75d7a77aaeb" [ 1894.167128] env[62476]: _type = "Task" [ 1894.167128] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.176698] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52b77096-24e7-e739-8d39-b75d7a77aaeb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.192793] env[62476]: DEBUG nova.compute.manager [req-0a382eaa-e9a5-402b-9279-338de69e3283 req-9848ab3e-56e0-4273-899a-799e57c0ec74 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Received event network-vif-plugged-8366dcad-61a8-4a54-b732-1cf284f0a4ed {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1894.193154] env[62476]: DEBUG oslo_concurrency.lockutils [req-0a382eaa-e9a5-402b-9279-338de69e3283 req-9848ab3e-56e0-4273-899a-799e57c0ec74 service nova] Acquiring lock "3462762c-09da-473b-b2ba-4dce6c99dd8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.193427] env[62476]: DEBUG oslo_concurrency.lockutils [req-0a382eaa-e9a5-402b-9279-338de69e3283 req-9848ab3e-56e0-4273-899a-799e57c0ec74 service nova] Lock "3462762c-09da-473b-b2ba-4dce6c99dd8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.193571] env[62476]: DEBUG oslo_concurrency.lockutils [req-0a382eaa-e9a5-402b-9279-338de69e3283 req-9848ab3e-56e0-4273-899a-799e57c0ec74 service nova] Lock "3462762c-09da-473b-b2ba-4dce6c99dd8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.193732] env[62476]: DEBUG nova.compute.manager [req-0a382eaa-e9a5-402b-9279-338de69e3283 req-9848ab3e-56e0-4273-899a-799e57c0ec74 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] No waiting events found dispatching network-vif-plugged-8366dcad-61a8-4a54-b732-1cf284f0a4ed {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1894.193896] env[62476]: WARNING nova.compute.manager [req-0a382eaa-e9a5-402b-9279-338de69e3283 req-9848ab3e-56e0-4273-899a-799e57c0ec74 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Received unexpected event network-vif-plugged-8366dcad-61a8-4a54-b732-1cf284f0a4ed for instance with vm_state building and task_state spawning. 
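The "No waiting events found ... Received unexpected event" pair above is Nova's external-event handshake firing in the wrong order: the compute manager registers a waiter per instance before it expects Neutron to report a VIF as plugged, and pop_instance_event looks that waiter up under the "<uuid>-events" lock seen in the log; if the event arrives before any waiter exists (common while the instance is still building/spawning), it is logged as unexpected and dropped. A toy prepare/dispatch registry, loosely modeled on that mechanism (the names, and the use of threading instead of eventlet, are illustrative, not Nova's actual implementation):

import threading
from collections import defaultdict

class InstanceEventRegistry:
    """Per-instance table of expected external events. Illustrative only."""

    def __init__(self):
        self._lock = threading.Lock()           # ~ the "<uuid>-events" lock
        self._events = defaultdict(dict)        # uuid -> {event name: Event}

    def prepare_for_event(self, instance_uuid, name):
        # Register the waiter *before* triggering the external action
        # (e.g. before asking Neutron to bind the port).
        ev = threading.Event()
        with self._lock:
            self._events[instance_uuid][name] = ev
        return ev

    def pop_event(self, instance_uuid, name):
        with self._lock:
            return self._events[instance_uuid].pop(name, None)

def dispatch_external_event(registry, instance_uuid, name):
    ev = registry.pop_event(instance_uuid, name)
    if ev is None:
        # No waiting events found -> the "Received unexpected event" WARNING
        print(f"unexpected event {name} for instance {instance_uuid}")
        return
    ev.set()  # wake whichever thread called ev.wait()

Calling dispatch_external_event before prepare_for_event reproduces the ordering seen in the log; the event is simply discarded.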
[ 1894.260182] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Successfully updated port: 8366dcad-61a8-4a54-b732-1cf284f0a4ed {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1894.272713] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "refresh_cache-3462762c-09da-473b-b2ba-4dce6c99dd8d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.272894] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired lock "refresh_cache-3462762c-09da-473b-b2ba-4dce6c99dd8d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.273042] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1894.323889] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1894.678900] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.679204] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1894.679534] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.780877] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Updating instance_info_cache with network_info: [{"id": "8366dcad-61a8-4a54-b732-1cf284f0a4ed", "address": "fa:16:3e:7e:06:f3", "network": {"id": "541ae4a8-5066-487b-9521-ddc3013eecf0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-226251912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69b584dd19a64cf7884d0302529d4de2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8366dcad-61", "ovs_interfaceid": "8366dcad-61a8-4a54-b732-1cf284f0a4ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.793049] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Releasing lock "refresh_cache-3462762c-09da-473b-b2ba-4dce6c99dd8d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.793358] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] 
Instance network_info: |[{"id": "8366dcad-61a8-4a54-b732-1cf284f0a4ed", "address": "fa:16:3e:7e:06:f3", "network": {"id": "541ae4a8-5066-487b-9521-ddc3013eecf0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-226251912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69b584dd19a64cf7884d0302529d4de2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8366dcad-61", "ovs_interfaceid": "8366dcad-61a8-4a54-b732-1cf284f0a4ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1894.793764] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:06:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8366dcad-61a8-4a54-b732-1cf284f0a4ed', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1894.801012] env[62476]: DEBUG oslo.service.loopingcall [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.801486] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1894.801718] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a9f895c-2de1-4126-b4e9-1c6fb429c6ef {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.821987] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1894.821987] env[62476]: value = "task-4319180" [ 1894.821987] env[62476]: _type = "Task" [ 1894.821987] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.829875] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319180, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.332627] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319180, 'name': CreateVM_Task, 'duration_secs': 0.324366} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.333041] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1895.341534] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.341724] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.342083] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1895.342345] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2327c32-3c23-467a-980e-958a7b8557ea {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.347774] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){ [ 1895.347774] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]528efee9-1e33-7ba0-6cd9-5ac2afd1d06b" [ 1895.347774] env[62476]: _type = "Task" [ 1895.347774] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.356162] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]528efee9-1e33-7ba0-6cd9-5ac2afd1d06b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.858317] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.858568] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1895.858790] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.228136] env[62476]: DEBUG nova.compute.manager [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Received event network-changed-8366dcad-61a8-4a54-b732-1cf284f0a4ed {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1896.228285] env[62476]: DEBUG nova.compute.manager [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Refreshing instance network info cache due to event network-changed-8366dcad-61a8-4a54-b732-1cf284f0a4ed. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1896.228506] env[62476]: DEBUG oslo_concurrency.lockutils [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] Acquiring lock "refresh_cache-3462762c-09da-473b-b2ba-4dce6c99dd8d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.228945] env[62476]: DEBUG oslo_concurrency.lockutils [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] Acquired lock "refresh_cache-3462762c-09da-473b-b2ba-4dce6c99dd8d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.228945] env[62476]: DEBUG nova.network.neutron [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Refreshing network info cache for port 8366dcad-61a8-4a54-b732-1cf284f0a4ed {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1896.499345] env[62476]: DEBUG nova.network.neutron [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Updated VIF entry in instance network info cache for port 8366dcad-61a8-4a54-b732-1cf284f0a4ed. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1896.499722] env[62476]: DEBUG nova.network.neutron [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Updating instance_info_cache with network_info: [{"id": "8366dcad-61a8-4a54-b732-1cf284f0a4ed", "address": "fa:16:3e:7e:06:f3", "network": {"id": "541ae4a8-5066-487b-9521-ddc3013eecf0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-226251912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69b584dd19a64cf7884d0302529d4de2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8366dcad-61", "ovs_interfaceid": "8366dcad-61a8-4a54-b732-1cf284f0a4ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.510166] env[62476]: DEBUG oslo_concurrency.lockutils [req-a32fc380-bdd0-4141-b3a3-f28e5e53184d req-ed159ec3-3b0a-4d0b-8a85-9c07ae571ac4 service nova] Releasing lock "refresh_cache-3462762c-09da-473b-b2ba-4dce6c99dd8d" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.027402] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1899.042530] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.042769] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.042950] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.043145] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource 
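The instance_info_cache payload logged above is plain JSON: a list of VIF dicts, each with a nested network/subnets/ips structure. A short illustrative helper, using exactly the field names visible in the entry, that extracts the fixed IPs and device name per port:

```python
# Illustrative parser for the network_info structure cached above; the field
# names are copied from the logged entry, the helper itself is not Nova code.
import json

def fixed_ips(network_info_json: str):
    results = []
    for vif in json.loads(network_info_json):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]
               if ip["type"] == "fixed"]
        results.append({"port_id": vif["id"],
                        "devname": vif.get("devname"),
                        "ips": ips})
    return results

# For the cache entry logged above this yields:
# [{'port_id': '8366dcad-61a8-4a54-b732-1cf284f0a4ed',
#   'devname': 'tap8366dcad-61', 'ips': ['192.168.128.8']}]
```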
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1899.044706] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1464271-2e99-4843-8713-71bbd0743a9a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.053521] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb629ade-0738-4da6-afc3-6a244c4e3ddd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.070995] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6129cb5-2db3-46b7-9de5-26e569e80bc3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.078664] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88349a3e-7219-4451-a18e-b2962122baac {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.108239] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180701MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1899.108395] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.108528] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.186269] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.186445] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.186575] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.186696] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.186811] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.186925] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.187051] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.187165] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.187281] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.187393] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.201929] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
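Each instance listed above holds a placement allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}; together with the 512 MB reserved in the MEMORY_MB inventory, the ten allocations account exactly for the usage figures in the final resource view just below. A worked check with the values copied from the surrounding records:

```python
# Worked check of the resource-tracker numbers (all values copied from the
# surrounding log records; nothing here is invented).
reserved_mb = 512          # MEMORY_MB 'reserved' in the reported inventory
instances = 10             # instances with allocations listed above
per_instance_mb = 128      # MEMORY_MB per allocation

assert reserved_mb + instances * per_instance_mb == 1792   # "used_ram=1792MB"
assert instances * 1 == 10                                  # "used_disk=10GB"
assert instances * 1 == 10                                  # "used_vcpus=10"

# Schedulable VCPU capacity implied by the inventory reported to placement:
total_vcpus, allocation_ratio, vcpu_reserved = 48, 4.0, 0
assert int(total_vcpus * allocation_ratio) - vcpu_reserved == 192
```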
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1899.202175] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1899.202324] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1899.342551] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc94da0-6e7c-4b12-9c46-dace670dc3c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.350455] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e0e0d4-3e4c-4f56-9a6a-9cee135072ad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.382186] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb8cf5d-d563-49f0-823d-a83ac62659d8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.390137] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c8e921-4132-4647-9f22-4571702e4257 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.403841] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1899.415505] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1899.430058] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1899.430266] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.322s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.430906] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.431292] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1902.431292] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1902.452325] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.452512] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.452613] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.452743] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.452866] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.452984] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.453147] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.453275] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.453420] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. 
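The heal/poll records here come from oslo.service's periodic-task machinery, which Nova's ComputeManager inherits. A self-contained sketch of how such a task is declared and driven, assuming oslo.service and oslo.config are available; the manager class and task body are illustrative, not Nova's actual code:

```python
# Illustrative periodic-task declaration in the style of the records above,
# assuming oslo.service and oslo.config are installed.
from oslo_config import cfg
from oslo_service import periodic_task


class Manager(periodic_task.PeriodicTasks):
    """Stand-in for nova.compute.manager.ComputeManager."""

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _heal_instance_info_cache(self, context):
        # Rebuild the list of instances to heal, skipping any still
        # Building, as the records above show.
        pass


mgr = Manager(cfg.CONF)
# Due tasks are logged as "Running periodic task Manager._heal_instance_info_cache".
mgr.run_periodic_tasks(context=None)
```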
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.453561] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1902.453683] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1905.026798] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.027215] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1905.027336] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1906.028446] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.573682] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "11af6076-e985-477c-98a6-437843b26b02" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.574015] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "11af6076-e985-477c-98a6-437843b26b02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.027215] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1910.022963] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1911.027306] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1912.028132] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.025049] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1924.330955] env[62476]: DEBUG oslo_concurrency.lockutils [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.117665] env[62476]: DEBUG oslo_concurrency.lockutils [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "3462762c-09da-473b-b2ba-4dce6c99dd8d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.161853] env[62476]: WARNING oslo_vmware.rw_handles [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1938.161853] env[62476]: ERROR oslo_vmware.rw_handles [ 1938.162609] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476)
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1938.165020] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1938.165374] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Copying Virtual Disk [datastore1] vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/6ed8cef6-da27-4a3d-b8a7-b7bac42456ff/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1938.166168] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2054c487-f10c-4665-a74f-1e7a7bea1dc9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.175197] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 1938.175197] env[62476]: value = "task-4319181" [ 1938.175197] env[62476]: _type = "Task" [ 1938.175197] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.183652] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': task-4319181, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.685675] env[62476]: DEBUG oslo_vmware.exceptions [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Fault InvalidArgument not matched. 
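The RemoteDisconnected warning above is raised when rw_handles.close() calls getresponse() on an upload connection the server has already dropped. A stdlib-only sketch of tolerating that specific failure when finalizing an HTTP transfer; connection setup is omitted and the helper is illustrative, not the oslo.vmware code:

```python
# Stdlib-only sketch of the failure mode in the traceback above: finishing an
# upload by reading the response, but tolerating an early server disconnect.
import http.client

def close_upload(conn: http.client.HTTPSConnection):
    try:
        resp = conn.getresponse()   # the call that raised in rw_handles.close()
        resp.read()
        return resp.status
    except http.client.RemoteDisconnected:
        # Remote end closed without a response; the log shows oslo.vmware
        # warning about this and proceeding, since the data may already be
        # fully written on the datastore side.
        return None
    finally:
        conn.close()
```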
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1938.685969] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.686536] env[62476]: ERROR nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1938.686536] env[62476]: Faults: ['InvalidArgument'] [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Traceback (most recent call last): [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] yield resources [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self.driver.spawn(context, instance, image_meta, [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self._fetch_image_if_missing(context, vi) [ 1938.686536] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] image_cache(vi, tmp_image_ds_loc) [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] vm_util.copy_virtual_disk( [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] session._wait_for_task(vmdk_copy_task) [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] return self.wait_for_task(task_ref) [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] return evt.wait() [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] result = hub.switch() [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1938.686982] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] return self.greenlet.switch() [ 1938.687459] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1938.687459] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self.f(*self.args, **self.kw) [ 1938.687459] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1938.687459] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] raise exceptions.translate_fault(task_info.error) [ 1938.687459] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1938.687459] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Faults: ['InvalidArgument'] [ 1938.687459] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] [ 1938.687459] env[62476]: INFO nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Terminating instance [ 1938.688504] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.688715] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1938.688955] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84b10c9e-7dbe-44a2-87d4-98e6af461259 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.691315] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1938.691508] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1938.692236] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4426a644-f635-4693-a6e0-4892291bf8fe {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.698862] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1938.699083] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2eb40e8c-c552-40ad-8581-7eb234c58d5f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.701133] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1938.701308] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1938.702284] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfeaed5c-696e-4345-9739-18f47b3ee226 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.707232] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Waiting for the task: (returnval){ [ 1938.707232] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52a2fbcd-aa71-dd3c-7cfd-9136706f284b" [ 1938.707232] env[62476]: _type = "Task" [ 1938.707232] env[62476]: } to complete. 
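The "Fault InvalidArgument not matched" record above shows oslo.vmware's fault translation: get_fault_class() tries to map the fault name to a specific exception class and, failing that, the task poller raises a generic VimFaultException carrying the fault list. An illustrative sketch of that dispatch; the class names mirror oslo.vmware but the registry and constructor here are simplified assumptions:

```python
# Simplified sketch of fault translation; the registry and constructor are
# assumptions, only the fallback-to-generic behaviour is taken from the log.
class VimFaultException(Exception):
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


class FileNotFoundException(VimFaultException):
    pass


_FAULT_CLASSES = {"FileNotFound": FileNotFoundException}  # illustrative


def translate_fault(fault_name, message):
    # Unmatched fault names ("Fault InvalidArgument not matched.") fall back
    # to the generic exception, as seen above.
    cls = _FAULT_CLASSES.get(fault_name, VimFaultException)
    return cls([fault_name], message)


exc = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
assert type(exc) is VimFaultException and exc.fault_list == ["InvalidArgument"]
```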
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.721771] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1938.722014] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Creating directory with path [datastore1] vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1938.722247] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a5e33d0-c8d7-45b3-a669-34451a826c62 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.778061] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Created directory with path [datastore1] vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1938.778280] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Fetch image to [datastore1] vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1938.778439] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1938.779302] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cba44e3-5db2-48e2-ab4d-367ba0e8dd43 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.787317] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800ad5fc-2e54-49d6-8df0-1f88af8af45d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.796947] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca23fa44-f0ba-440f-905d-393756d5398a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.828863] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ae7544-539f-4af3-a8fa-63a81dd8e39d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.835184] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ecf15165-f7c5-4bd4-9226-5c3a9ebda660 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.856500] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1938.919136] env[62476]: DEBUG oslo_vmware.rw_handles [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1938.978851] env[62476]: DEBUG oslo_vmware.rw_handles [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1938.979103] env[62476]: DEBUG oslo_vmware.rw_handles [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
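The write handle above streams the sparse VMDK over HTTPS to the ESX host's /folder endpoint, addressing the target file with dcPath and dsName query parameters. A hedged sketch of that transfer using requests; the real rw_handles code uses http.client with chunked writes and a service ticket (the AcquireGenericServiceTicket call above), so everything here beyond the URL shape is an assumption:

```python
# Hedged sketch of the datastore upload logged above. URL shape copied from
# the log; auth header, content type and the use of requests are assumptions.
import requests

def upload_vmdk(host, ds_path, local_file, cookie, size):
    url = f"https://{host}:443/folder/{ds_path}"
    params = {"dcPath": "ha-datacenter", "dsName": "datastore1"}
    headers = {"Content-Type": "binary/octet-stream",
               "Content-Length": str(size),
               "Cookie": cookie}  # session/ticket auth; illustrative
    with open(local_file, "rb") as f:
        # Streams the file body to the datastore folder endpoint.
        resp = requests.put(url, params=params, data=f,
                            headers=headers, verify=False)
    resp.raise_for_status()
    return resp.status_code
```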
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1939.817528] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1939.817911] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1939.818013] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Deleting the datastore file [datastore1] 27737774-efb5-4aee-a0c0-695e78a15dd6 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1939.818366] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05909f28-24b3-42c9-bbed-7f06db0c2106 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.825907] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 1939.825907] env[62476]: value = "task-4319183" [ 1939.825907] env[62476]: _type = "Task" [ 1939.825907] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.834404] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': task-4319183, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.336312] env[62476]: DEBUG oslo_vmware.api [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': task-4319183, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076807} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.336529] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1940.336781] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1940.337020] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1940.337246] env[62476]: INFO nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1940.340273] env[62476]: DEBUG nova.compute.claims [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1940.340486] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.340728] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.549377] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a80e634-df20-48af-a793-782b319a2b79 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.557516] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4546adde-1cac-4fb0-94ea-acabf3fced4e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.587401] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdcdd03-2fc3-4c8b-8376-ce0a96bf4099 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.595018] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6c7984-a532-4701-9f19-1d08bc713d0f {{(pid=62476) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.608481] env[62476]: DEBUG nova.compute.provider_tree [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1940.618442] env[62476]: DEBUG nova.scheduler.client.report [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1940.632101] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.291s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.632644] env[62476]: ERROR nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1940.632644] env[62476]: Faults: ['InvalidArgument'] [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Traceback (most recent call last): [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self.driver.spawn(context, instance, image_meta, [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self._fetch_image_if_missing(context, vi) [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1940.632644] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] image_cache(vi, tmp_image_ds_loc) [ 1940.632644] env[62476]: ERROR 
nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] vm_util.copy_virtual_disk( [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] session._wait_for_task(vmdk_copy_task) [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] return self.wait_for_task(task_ref) [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] return evt.wait() [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] result = hub.switch() [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] return self.greenlet.switch() [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1940.633110] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] self.f(*self.args, **self.kw) [ 1940.633545] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1940.633545] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] raise exceptions.translate_fault(task_info.error) [ 1940.633545] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1940.633545] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Faults: ['InvalidArgument'] [ 1940.633545] env[62476]: ERROR nova.compute.manager [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] [ 1940.633545] env[62476]: DEBUG nova.compute.utils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1940.635401] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 
tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Build of instance 27737774-efb5-4aee-a0c0-695e78a15dd6 was re-scheduled: A specified parameter was not correct: fileType [ 1940.635401] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1940.635779] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1940.635955] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1940.636143] env[62476]: DEBUG nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1940.636309] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1940.985520] env[62476]: DEBUG nova.network.neutron [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.998558] env[62476]: INFO nova.compute.manager [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Took 0.36 seconds to deallocate network for instance. 
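The records above trace Nova's standard build-failure path: the spawn fault propagates out of _build_and_run_instance, the resource claim is aborted, networking is torn down, and the request is handed back to the scheduler. An illustrative control-flow sketch in which every helper is hypothetical, standing in for Nova internals:

```python
# Illustrative control flow for the failure-and-reschedule sequence above;
# every helper is hypothetical, not Nova's actual API.
class BuildFailed(Exception):
    pass


def do_build(instance, spawn, abort_claim, deallocate_network, reschedule):
    try:
        spawn(instance)                    # driver.spawn -> VimFaultException
    except BuildFailed as exc:
        abort_claim(instance)              # "Aborting claim" / frees compute_resources
        deallocate_network(instance)       # "Deallocating network for instance"
        reschedule(instance, reason=str(exc))  # "Build ... was re-scheduled"
```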
[ 1941.102077] env[62476]: INFO nova.scheduler.client.report [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Deleted allocations for instance 27737774-efb5-4aee-a0c0-695e78a15dd6 [ 1941.128510] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ca089bab-aebb-4489-85f4-76f254b83df1 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 606.348s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.129944] env[62476]: DEBUG oslo_concurrency.lockutils [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 409.984s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.130299] env[62476]: DEBUG oslo_concurrency.lockutils [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "27737774-efb5-4aee-a0c0-695e78a15dd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.130605] env[62476]: DEBUG oslo_concurrency.lockutils [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.130826] env[62476]: DEBUG oslo_concurrency.lockutils [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.133095] env[62476]: INFO nova.compute.manager [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Terminating instance [ 1941.135658] env[62476]: DEBUG nova.compute.manager [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1941.135658] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1941.135934] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b511fef3-1f77-4ca0-b3d6-0436b557ead1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.145524] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93113a7-c8d0-4188-8013-5b71a5fe9028 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.156940] env[62476]: DEBUG nova.compute.manager [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1941.416369] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 27737774-efb5-4aee-a0c0-695e78a15dd6 could not be found. [ 1941.416369] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1941.416369] env[62476]: INFO nova.compute.manager [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1941.416369] env[62476]: DEBUG oslo.service.loopingcall [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.416369] env[62476]: DEBUG nova.compute.manager [-] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1941.416858] env[62476]: DEBUG nova.network.neutron [-] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1941.416858] env[62476]: DEBUG nova.network.neutron [-] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.416858] env[62476]: INFO nova.compute.manager [-] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] Took 0.03 seconds to deallocate network for instance. [ 1941.458820] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.459132] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.460636] env[62476]: INFO nova.compute.claims [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1941.524716] env[62476]: DEBUG oslo_concurrency.lockutils [None req-72828090-6104-4f7c-aeab-7193d1520870 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.395s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.526418] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 127.078s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.526547] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 27737774-efb5-4aee-a0c0-695e78a15dd6] During sync_power_state the instance has a pending task (deleting). Skip. 
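
The lock lines in this stretch all come from oslo.concurrency's lockutils, which Nova uses (via thin wrappers) for both coarse per-host locks ("compute_resources") and per-instance locks (the UUID and UUID-events names). The 'acquired ... waited Ns' and '"released" ... held Ns' DEBUG records are emitted by lockutils' inner() wrapper itself, so the timings can be read directly: do_terminate_instance waited 409.984s for the instance lock while the failed build held it for 606.348s, i.e. the delete was queued behind the build for its entire retry. A small sketch of the two usage forms follows; the lock names are the real ones from the log, the function bodies are illustrative.

    from oslo_concurrency import lockutils

    # Decorator form, as behind ResourceTracker.instance_claim():
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Runs with the named semaphore held; lockutils logs the
        # "acquired :: waited" and "released :: held" lines seen above.
        pass

    # Context-manager form, as used for the per-instance event lock:
    with lockutils.lock('27737774-efb5-4aee-a0c0-695e78a15dd6-events'):
        pass  # clear_events_for_instance() runs in a scope like this

One detail worth noting in the surrounding records: the terminate path finds no backing VM (InstanceNotFound, then "Instance destroyed" after 0.04 seconds) because the spawn never got past the failed disk copy, so teardown here is effectively bookkeeping plus network deallocation.
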
[ 1941.526671] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "27737774-efb5-4aee-a0c0-695e78a15dd6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.674740] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9b9cd2-b8fc-487c-b193-4afe98e8d40b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.682646] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47db7a29-2f30-42bd-a9b8-ebdb246bc765 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.714718] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99026ad9-b1de-475e-bde5-466e2be58b4c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.722889] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c39a13-fd49-4ec2-88c3-f03e6fa5f863 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.736669] env[62476]: DEBUG nova.compute.provider_tree [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1941.745311] env[62476]: DEBUG nova.scheduler.client.report [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1941.761036] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.301s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.761036] env[62476]: DEBUG nova.compute.manager [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Start building networks asynchronously for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1941.799055] env[62476]: DEBUG nova.compute.utils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1941.799957] env[62476]: DEBUG nova.compute.manager [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1941.800148] env[62476]: DEBUG nova.network.neutron [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1941.813223] env[62476]: DEBUG nova.compute.manager [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1941.866246] env[62476]: DEBUG nova.policy [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9e4673294b1477d93bdae5dfef42927', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16d034f4180f4aeaa8f880c3e6767730', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1941.880296] env[62476]: DEBUG nova.compute.manager [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1941.907133] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1941.907395] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1941.907555] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1941.907743] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1941.907929] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1941.908101] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1941.908262] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1941.908422] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1941.908593] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 
tempest-ImagesTestJSON-1877863561-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1941.908756] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1941.908931] env[62476]: DEBUG nova.virt.hardware [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1941.909834] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9719ab4f-89c8-4a53-872d-594af7bd514c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.918929] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa1895b-cd01-4a28-9655-20b6cf553c33 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.191711] env[62476]: DEBUG nova.network.neutron [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Successfully created port: 68ad055b-0aac-4a2b-a03b-9ffd1c597968 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1942.913932] env[62476]: DEBUG nova.network.neutron [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Successfully updated port: 68ad055b-0aac-4a2b-a03b-9ffd1c597968 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1942.924968] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "refresh_cache-8fdd45f2-0c21-461f-896e-698182bd5337" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.925207] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "refresh_cache-8fdd45f2-0c21-461f-896e-698182bd5337" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.925416] env[62476]: DEBUG nova.network.neutron [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1942.972129] env[62476]: DEBUG nova.network.neutron [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1943.031916] env[62476]: DEBUG nova.compute.manager [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Received event network-vif-plugged-68ad055b-0aac-4a2b-a03b-9ffd1c597968 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1943.032157] env[62476]: DEBUG oslo_concurrency.lockutils [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] Acquiring lock "8fdd45f2-0c21-461f-896e-698182bd5337-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.032361] env[62476]: DEBUG oslo_concurrency.lockutils [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] Lock "8fdd45f2-0c21-461f-896e-698182bd5337-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.032525] env[62476]: DEBUG oslo_concurrency.lockutils [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] Lock "8fdd45f2-0c21-461f-896e-698182bd5337-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.032691] env[62476]: DEBUG nova.compute.manager [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] No waiting events found dispatching network-vif-plugged-68ad055b-0aac-4a2b-a03b-9ffd1c597968 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1943.032852] env[62476]: WARNING nova.compute.manager [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Received unexpected event network-vif-plugged-68ad055b-0aac-4a2b-a03b-9ffd1c597968 for instance with vm_state building and task_state spawning. [ 1943.033195] env[62476]: DEBUG nova.compute.manager [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Received event network-changed-68ad055b-0aac-4a2b-a03b-9ffd1c597968 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1943.033460] env[62476]: DEBUG nova.compute.manager [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Refreshing instance network info cache due to event network-changed-68ad055b-0aac-4a2b-a03b-9ffd1c597968. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1943.033665] env[62476]: DEBUG oslo_concurrency.lockutils [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] Acquiring lock "refresh_cache-8fdd45f2-0c21-461f-896e-698182bd5337" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.155494] env[62476]: DEBUG nova.network.neutron [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Updating instance_info_cache with network_info: [{"id": "68ad055b-0aac-4a2b-a03b-9ffd1c597968", "address": "fa:16:3e:f5:6b:0a", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ad055b-0a", "ovs_interfaceid": "68ad055b-0aac-4a2b-a03b-9ffd1c597968", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.169165] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "refresh_cache-8fdd45f2-0c21-461f-896e-698182bd5337" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.169496] env[62476]: DEBUG nova.compute.manager [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Instance network_info: |[{"id": "68ad055b-0aac-4a2b-a03b-9ffd1c597968", "address": "fa:16:3e:f5:6b:0a", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ad055b-0a", 
"ovs_interfaceid": "68ad055b-0aac-4a2b-a03b-9ffd1c597968", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1943.169812] env[62476]: DEBUG oslo_concurrency.lockutils [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] Acquired lock "refresh_cache-8fdd45f2-0c21-461f-896e-698182bd5337" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.169990] env[62476]: DEBUG nova.network.neutron [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Refreshing network info cache for port 68ad055b-0aac-4a2b-a03b-9ffd1c597968 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1943.171285] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:6b:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68ad055b-0aac-4a2b-a03b-9ffd1c597968', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1943.179694] env[62476]: DEBUG oslo.service.loopingcall [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1943.183360] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1943.183746] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30efad8e-d905-4d4d-9b5a-00699f692c1f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.205778] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1943.205778] env[62476]: value = "task-4319184" [ 1943.205778] env[62476]: _type = "Task" [ 1943.205778] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.217133] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319184, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.490197] env[62476]: DEBUG nova.network.neutron [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Updated VIF entry in instance network info cache for port 68ad055b-0aac-4a2b-a03b-9ffd1c597968. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1943.490756] env[62476]: DEBUG nova.network.neutron [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Updating instance_info_cache with network_info: [{"id": "68ad055b-0aac-4a2b-a03b-9ffd1c597968", "address": "fa:16:3e:f5:6b:0a", "network": {"id": "3ecf6641-8ea2-463b-b2bd-1da0bbd310ec", "bridge": "br-int", "label": "tempest-ImagesTestJSON-686261071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d034f4180f4aeaa8f880c3e6767730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd98a25d-a7a9-4fb5-8fef-e8df4dbbbf11", "external-id": "nsx-vlan-transportzone-707", "segmentation_id": 707, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ad055b-0a", "ovs_interfaceid": "68ad055b-0aac-4a2b-a03b-9ffd1c597968", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.502019] env[62476]: DEBUG oslo_concurrency.lockutils [req-f5290990-9abe-407a-8ec7-b9279bf36bca req-aa1ea9cb-313c-4d2d-960d-2b3b94940724 service nova] Releasing lock "refresh_cache-8fdd45f2-0c21-461f-896e-698182bd5337" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.716470] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319184, 'name': CreateVM_Task, 'duration_secs': 0.320483} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.716701] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1943.717413] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.717651] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.718024] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1943.718327] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aef4c8bd-ce86-4185-90db-ee922246cda3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.723541] env[62476]: DEBUG oslo_vmware.api [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 1943.723541] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]521e2de6-76d7-2f90-9c7f-8561150f8629" [ 1943.723541] env[62476]: _type = "Task" [ 1943.723541] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.732354] env[62476]: DEBUG oslo_vmware.api [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]521e2de6-76d7-2f90-9c7f-8561150f8629, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.235019] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1944.235319] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1944.235543] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.028619] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.044392] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.044666] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.044819] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.045017] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1961.046246] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b534597-de6e-4deb-9cc1-159c024d72c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.055285] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa5a0bf-9277-4e4f-8d18-07965ca5a1c7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.069625] env[62476]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bb340a-ff3b-4e2e-b7a9-8f704f3b3caa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.076069] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb008a9e-af01-40fd-a536-b1005e023238 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.106357] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180711MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1961.106503] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.106679] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.187531] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance fe895d70-4c56-4854-83bf-a66cc1623d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.187691] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.187824] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.187951] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.188093] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.188216] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.188333] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.188446] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.188561] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.188679] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1961.200131] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1961.200373] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1961.200527] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1961.340999] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb00edd-8885-4c2f-aeea-98f08636a41b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.348852] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cd5bf4-55ea-47d2-8e18-f55a0cb3619d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.380555] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c0b64b-6aba-46e6-8fe1-8f73c38eced0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.389124] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8f0197-6fec-43c4-b81b-43ac22e5cdb2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.404489] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.414161] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1961.429664] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1961.429889] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.323s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.429387] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.429780] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1963.429780] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1963.450668] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.450876] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451037] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451113] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451239] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451398] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451486] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451627] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451778] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.451909] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1963.452042] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1966.026605] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1966.026937] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1966.026990] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1967.027737] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1968.173145] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1970.027601] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1971.023494] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1973.026593] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1973.026934] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1988.182054] env[62476]: WARNING oslo_vmware.rw_handles [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 
tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1988.182054] env[62476]: ERROR oslo_vmware.rw_handles [ 1988.182054] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1988.183744] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1988.184043] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Copying Virtual Disk [datastore1] vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/2a5e492d-3552-4c9a-aae8-a3b60d230c17/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1988.184340] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da02eaf0-98b8-4de6-81a9-63eadb00e275 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.192312] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Waiting for the task: (returnval){ [ 1988.192312] env[62476]: value = "task-4319185" [ 1988.192312] env[62476]: _type = "Task" [ 
1988.192312] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.200626] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Task: {'id': task-4319185, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.702880] env[62476]: DEBUG oslo_vmware.exceptions [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1988.703213] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.703774] env[62476]: ERROR nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1988.703774] env[62476]: Faults: ['InvalidArgument'] [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Traceback (most recent call last): [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] yield resources [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self.driver.spawn(context, instance, image_meta, [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self._fetch_image_if_missing(context, vi) [ 1988.703774] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] image_cache(vi, 
tmp_image_ds_loc) [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] vm_util.copy_virtual_disk( [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] session._wait_for_task(vmdk_copy_task) [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] return self.wait_for_task(task_ref) [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] return evt.wait() [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] result = hub.switch() [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1988.704227] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] return self.greenlet.switch() [ 1988.704616] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1988.704616] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self.f(*self.args, **self.kw) [ 1988.704616] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1988.704616] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] raise exceptions.translate_fault(task_info.error) [ 1988.704616] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1988.704616] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Faults: ['InvalidArgument'] [ 1988.704616] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] [ 1988.704616] env[62476]: INFO nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Terminating instance [ 1988.705708] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 
tempest-ServersAaction247Test-405319489-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.706470] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1988.706470] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0abc2f02-6a33-4009-9572-574f1d1a7fbc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.708376] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1988.708580] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1988.709367] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ed82ad-f678-46d0-a14e-851ccde389af {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.716642] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1988.716922] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9a01f6a-c524-4bf3-898b-6bd2a22d8d2f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.719139] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1988.719321] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1988.720323] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61d5f5a2-c55d-4226-84fc-d9f7e992a2ad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.725153] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Waiting for the task: (returnval){ [ 1988.725153] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52ace6d1-7da4-0cb8-7d95-c66045d85e52" [ 1988.725153] env[62476]: _type = "Task" [ 1988.725153] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.732639] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52ace6d1-7da4-0cb8-7d95-c66045d85e52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.782770] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1988.782973] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1988.783219] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Deleting the datastore file [datastore1] fe895d70-4c56-4854-83bf-a66cc1623d59 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1988.783506] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-612b21c6-59ae-497b-baac-97ddb346812d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.789754] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Waiting for the task: (returnval){ [ 1988.789754] env[62476]: value = "task-4319187" [ 1988.789754] env[62476]: _type = "Task" [ 1988.789754] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.798869] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Task: {'id': task-4319187, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.236109] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1989.236531] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Creating directory with path [datastore1] vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1989.236642] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1aa09fd7-f02e-4ff3-ba0d-b0792a320197 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.250046] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Created directory with path [datastore1] vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1989.250262] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Fetch image to [datastore1] vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1989.250436] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1989.251263] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65843991-dfba-41fd-86a2-78aab698d823 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.259368] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d88c3d-f1ed-47f4-829a-fdc2fd7a0333 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.270707] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9ede3e-f3e9-4cd9-9fc8-fa3831330566 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.305142] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74f6739-7ce1-431c-b15a-fbdec7b38c5f {{(pid=62476) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.312430] env[62476]: DEBUG oslo_vmware.api [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Task: {'id': task-4319187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084045} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.313883] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1989.314112] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1989.314302] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1989.314479] env[62476]: INFO nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1989.316327] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-15e0aac2-f44e-4f32-984b-1591965977cb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.318201] env[62476]: DEBUG nova.compute.claims [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1989.318382] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.318595] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.343178] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1989.402984] env[62476]: DEBUG oslo_vmware.rw_handles [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1989.462263] env[62476]: DEBUG oslo_vmware.rw_handles [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1989.462488] env[62476]: DEBUG oslo_vmware.rw_handles [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1989.568713] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e40359f-dbbf-47f3-a97b-a8372d915dfc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.576711] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd24166-f474-4c60-8cc8-57188545e6d3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.607225] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010bb3d7-b75b-4fad-b024-10c30c475535 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.614754] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daac8fd0-5efd-4229-874f-90b82c9e132e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.627971] env[62476]: DEBUG nova.compute.provider_tree [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1989.636461] env[62476]: DEBUG nova.scheduler.client.report [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1989.650887] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.332s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.651469] env[62476]: ERROR nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1989.651469] env[62476]: Faults: ['InvalidArgument'] [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Traceback (most recent call last): [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/compute/manager.py", line 
2633, in _build_and_run_instance [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self.driver.spawn(context, instance, image_meta, [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self._fetch_image_if_missing(context, vi) [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] image_cache(vi, tmp_image_ds_loc) [ 1989.651469] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] vm_util.copy_virtual_disk( [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] session._wait_for_task(vmdk_copy_task) [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] return self.wait_for_task(task_ref) [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] return evt.wait() [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] result = hub.switch() [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] return self.greenlet.switch() [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1989.651907] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] self.f(*self.args, **self.kw) [ 1989.652349] env[62476]: ERROR nova.compute.manager [instance: 
fe895d70-4c56-4854-83bf-a66cc1623d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1989.652349] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] raise exceptions.translate_fault(task_info.error) [ 1989.652349] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1989.652349] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Faults: ['InvalidArgument'] [ 1989.652349] env[62476]: ERROR nova.compute.manager [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] [ 1989.652349] env[62476]: DEBUG nova.compute.utils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1989.654833] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Build of instance fe895d70-4c56-4854-83bf-a66cc1623d59 was re-scheduled: A specified parameter was not correct: fileType [ 1989.654833] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1989.655299] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1989.655483] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1989.655663] env[62476]: DEBUG nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1989.655830] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1990.117913] env[62476]: DEBUG nova.network.neutron [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.132944] env[62476]: INFO nova.compute.manager [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Took 0.48 seconds to deallocate network for instance. [ 1990.242055] env[62476]: INFO nova.scheduler.client.report [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Deleted allocations for instance fe895d70-4c56-4854-83bf-a66cc1623d59 [ 1990.274044] env[62476]: DEBUG oslo_concurrency.lockutils [None req-38d8d8a0-a82f-4c78-b3bf-982a0d4fb2d4 tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 611.352s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.276056] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 415.994s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.276056] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Acquiring lock "fe895d70-4c56-4854-83bf-a66cc1623d59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.276056] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] 
Lock "fe895d70-4c56-4854-83bf-a66cc1623d59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.276328] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.278154] env[62476]: INFO nova.compute.manager [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Terminating instance [ 1990.279894] env[62476]: DEBUG nova.compute.manager [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1990.280129] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1990.280662] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac7e289f-53ef-4958-900c-57d283435890 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.291235] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b7b46b-4387-429f-8d50-53577ea942a1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.302462] env[62476]: DEBUG nova.compute.manager [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1990.325188] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fe895d70-4c56-4854-83bf-a66cc1623d59 could not be found. 
[ 1990.325416] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1990.325595] env[62476]: INFO nova.compute.manager [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1990.325841] env[62476]: DEBUG oslo.service.loopingcall [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1990.326074] env[62476]: DEBUG nova.compute.manager [-] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1990.326175] env[62476]: DEBUG nova.network.neutron [-] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1990.352565] env[62476]: DEBUG nova.network.neutron [-] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.357645] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.357881] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.359648] env[62476]: INFO nova.compute.claims [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1990.362801] env[62476]: INFO nova.compute.manager [-] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] Took 0.04 seconds to deallocate network for instance. 
[ 1990.458437] env[62476]: DEBUG oslo_concurrency.lockutils [None req-ab80b760-adff-4d39-b07c-8370d22cd33c tempest-ServerMetadataNegativeTestJSON-1337536820 tempest-ServerMetadataNegativeTestJSON-1337536820-project-member] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.459669] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 176.011s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.459669] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: fe895d70-4c56-4854-83bf-a66cc1623d59] During sync_power_state the instance has a pending task (deleting). Skip. [ 1990.459669] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "fe895d70-4c56-4854-83bf-a66cc1623d59" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.546619] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05eca8b2-657c-46b6-814e-64853417d25a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.554889] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a1ec7b-8259-43e4-8703-e0dc75e6235d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.585753] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f20f18-8f86-40ec-8618-f8bae9b486f7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.594236] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce80833-4bec-45b2-be38-8245c1d23095 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.608592] env[62476]: DEBUG nova.compute.provider_tree [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1990.617680] env[62476]: DEBUG nova.scheduler.client.report [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 
'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1990.634856] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.277s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.635380] env[62476]: DEBUG nova.compute.manager [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1990.671401] env[62476]: DEBUG nova.compute.utils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1990.673103] env[62476]: DEBUG nova.compute.manager [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1990.673664] env[62476]: DEBUG nova.network.neutron [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1990.681927] env[62476]: DEBUG nova.compute.manager [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1990.738037] env[62476]: DEBUG nova.policy [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a117f106402424280e477babc21990c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f16c7f1cb3ec41ffbdd622e3ee5992ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 1990.750492] env[62476]: DEBUG nova.compute.manager [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1990.776928] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1990.777209] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1990.777406] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1990.777629] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1990.777784] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1990.777931] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1990.778158] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1990.778347] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1990.778530] env[62476]: DEBUG nova.virt.hardware [None 
req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1990.778718] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1990.778897] env[62476]: DEBUG nova.virt.hardware [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1990.779823] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043ad305-6dad-436c-97a1-646a0613554f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.789372] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4e1e14-51b3-404f-ab36-97a701ec9136 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.050033] env[62476]: DEBUG nova.network.neutron [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Successfully created port: a40e0111-1335-4bd3-ac12-2c3609e3b09a {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1991.807885] env[62476]: DEBUG nova.network.neutron [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Successfully updated port: a40e0111-1335-4bd3-ac12-2c3609e3b09a {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1991.820174] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "refresh_cache-11af6076-e985-477c-98a6-437843b26b02" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.820335] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "refresh_cache-11af6076-e985-477c-98a6-437843b26b02" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.820485] env[62476]: DEBUG nova.network.neutron [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1991.870136] env[62476]: DEBUG nova.network.neutron [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 
tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1992.130338] env[62476]: DEBUG nova.network.neutron [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Updating instance_info_cache with network_info: [{"id": "a40e0111-1335-4bd3-ac12-2c3609e3b09a", "address": "fa:16:3e:7b:de:21", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa40e0111-13", "ovs_interfaceid": "a40e0111-1335-4bd3-ac12-2c3609e3b09a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.145774] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "refresh_cache-11af6076-e985-477c-98a6-437843b26b02" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.146137] env[62476]: DEBUG nova.compute.manager [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Instance network_info: |[{"id": "a40e0111-1335-4bd3-ac12-2c3609e3b09a", "address": "fa:16:3e:7b:de:21", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa40e0111-13", "ovs_interfaceid": "a40e0111-1335-4bd3-ac12-2c3609e3b09a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1992.146696] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:de:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3734b156-0f7d-4721-b23c-d000412ec2eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a40e0111-1335-4bd3-ac12-2c3609e3b09a', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1992.154364] env[62476]: DEBUG oslo.service.loopingcall [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1992.156117] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11af6076-e985-477c-98a6-437843b26b02] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1992.157308] env[62476]: DEBUG nova.compute.manager [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 11af6076-e985-477c-98a6-437843b26b02] Received event network-vif-plugged-a40e0111-1335-4bd3-ac12-2c3609e3b09a {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1992.157508] env[62476]: DEBUG oslo_concurrency.lockutils [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] Acquiring lock "11af6076-e985-477c-98a6-437843b26b02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.157716] env[62476]: DEBUG oslo_concurrency.lockutils [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] Lock "11af6076-e985-477c-98a6-437843b26b02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.157886] env[62476]: DEBUG oslo_concurrency.lockutils [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] Lock "11af6076-e985-477c-98a6-437843b26b02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.158071] env[62476]: DEBUG nova.compute.manager [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 11af6076-e985-477c-98a6-437843b26b02] No waiting events found dispatching network-vif-plugged-a40e0111-1335-4bd3-ac12-2c3609e3b09a {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1992.158247] env[62476]: WARNING nova.compute.manager [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 
11af6076-e985-477c-98a6-437843b26b02] Received unexpected event network-vif-plugged-a40e0111-1335-4bd3-ac12-2c3609e3b09a for instance with vm_state building and task_state spawning. [ 1992.158407] env[62476]: DEBUG nova.compute.manager [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 11af6076-e985-477c-98a6-437843b26b02] Received event network-changed-a40e0111-1335-4bd3-ac12-2c3609e3b09a {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1992.158560] env[62476]: DEBUG nova.compute.manager [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 11af6076-e985-477c-98a6-437843b26b02] Refreshing instance network info cache due to event network-changed-a40e0111-1335-4bd3-ac12-2c3609e3b09a. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1992.158742] env[62476]: DEBUG oslo_concurrency.lockutils [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] Acquiring lock "refresh_cache-11af6076-e985-477c-98a6-437843b26b02" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.158878] env[62476]: DEBUG oslo_concurrency.lockutils [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] Acquired lock "refresh_cache-11af6076-e985-477c-98a6-437843b26b02" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.159045] env[62476]: DEBUG nova.network.neutron [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 11af6076-e985-477c-98a6-437843b26b02] Refreshing network info cache for port a40e0111-1335-4bd3-ac12-2c3609e3b09a {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1992.160113] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d30076e-4b3c-4395-bb32-98aab9ae6fe8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.182987] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1992.182987] env[62476]: value = "task-4319188" [ 1992.182987] env[62476]: _type = "Task" [ 1992.182987] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.192132] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319188, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.694050] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319188, 'name': CreateVM_Task, 'duration_secs': 0.305676} completed successfully. 
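Annotation: the `_get_possible_cpu_topologies` / `_get_desirable_cpu_topologies` lines at the top of this excerpt enumerate every (sockets, cores, threads) split whose product equals the flavor's vCPU count, then sort by preference. A minimal sketch of that enumeration, assuming no flavor/image topology constraints (the limit defaults below are illustrative, not Nova's real values, and the ordering logic is omitted):

```python
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=8, max_cores=128, max_threads=2):
    """Every (sockets, cores, threads) split whose product is `vcpus`."""
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

# A 1-vCPU flavor admits exactly one split, matching the logged
# "Got 1 possible topologies ... VirtCPUTopology(cores=1,sockets=1,threads=1)":
print(possible_topologies(1))
```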
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.694284] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11af6076-e985-477c-98a6-437843b26b02] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1992.695049] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.695347] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.695735] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1992.696349] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687a53a3-ebe9-4ee5-9ac4-c584c6d2b040 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.701479] env[62476]: DEBUG oslo_vmware.api [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 1992.701479] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52f019da-7a6d-e45b-d7df-e7f61fdec8d2" [ 1992.701479] env[62476]: _type = "Task" [ 1992.701479] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.705229] env[62476]: DEBUG nova.network.neutron [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 11af6076-e985-477c-98a6-437843b26b02] Updated VIF entry in instance network info cache for port a40e0111-1335-4bd3-ac12-2c3609e3b09a. 
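Annotation: the "Acquiring lock" / "Acquired lock" / "Releasing lock" triples around the devstack-image-cache_base entries above are oslo.concurrency serialization. A minimal sketch of the same pattern, assuming a stand-in body; `lockutils.lock()` is the real context manager and the lock name is copied from the log:

```python
from oslo_concurrency import lockutils

CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
              '3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7')

def check_image_cache():
    # Only one worker in this scope touches the cache entry at a time; the
    # SearchDatastore_Task seen in the log runs inside this window, and the
    # "Acquiring"/"Acquired"/"Releasing" lines bracket this body.
    with lockutils.lock(CACHE_LOCK):
        pass  # stand-in for the datastore search / fetch work
```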
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1992.705533] env[62476]: DEBUG nova.network.neutron [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] [instance: 11af6076-e985-477c-98a6-437843b26b02] Updating instance_info_cache with network_info: [{"id": "a40e0111-1335-4bd3-ac12-2c3609e3b09a", "address": "fa:16:3e:7b:de:21", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa40e0111-13", "ovs_interfaceid": "a40e0111-1335-4bd3-ac12-2c3609e3b09a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.712384] env[62476]: DEBUG oslo_vmware.api [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52f019da-7a6d-e45b-d7df-e7f61fdec8d2, 'name': SearchDatastore_Task} progress is 0%. 
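Annotation: the instance_info_cache payload printed above is plain JSON, so the interesting fields are easy to pull out. The snippet below trims the logged entry for port a40e0111-1335-4bd3-ac12-2c3609e3b09a down to the fields it reads; all values are copied verbatim from the log:

```python
import json

network_info = json.loads("""
[{"id": "a40e0111-1335-4bd3-ac12-2c3609e3b09a",
  "address": "fa:16:3e:7b:de:21",
  "devname": "tapa40e0111-13",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.13",
                                    "type": "fixed"}]}]}}]
""")

# One VIF, one subnet, one fixed IP in this build:
for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            print(vif["id"], vif["address"], vif["devname"], ip["address"])
# a40e0111-1335-4bd3-ac12-2c3609e3b09a fa:16:3e:7b:de:21 tapa40e0111-13 192.168.128.13
```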
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.715032] env[62476]: DEBUG oslo_concurrency.lockutils [req-8491569b-619a-4013-afa7-180d9b2fafaa req-1104c933-efe4-4b46-9fd6-944dcca23d62 service nova] Releasing lock "refresh_cache-11af6076-e985-477c-98a6-437843b26b02" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1993.213396] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1993.213898] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 11af6076-e985-477c-98a6-437843b26b02] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1993.213898] env[62476]: DEBUG oslo_concurrency.lockutils [None req-202dbec3-246e-45a3-8672-848454a389e1 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.176528] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "56a5da15-57da-4d4d-a359-d90b780f67e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.176904] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Lock "56a5da15-57da-4d4d-a359-d90b780f67e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.027443] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.040987] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.041250] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.041446] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.041613] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2022.042798] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aff26c5-83a8-431b-894f-9f0d504a72e0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.053033] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6da2bcb-e7da-4ada-accb-e2449075f65e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.069578] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec79d886-f585-429a-927b-e9fa4479b396 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.078232] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb067e6-9414-415f-8275-61f1a40e02d9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.111061] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180714MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2022.111264] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.111368] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.186155] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance c7e551af-a94e-48da-a725-53ebd73d43ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186155] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186591] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186591] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186591] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186764] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186764] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186865] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.186935] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.187060] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2022.199991] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 56a5da15-57da-4d4d-a359-d90b780f67e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2022.199991] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2022.200224] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2022.345883] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb66f98-9d44-4196-acac-978708033874 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.353665] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce49eb3-1ac8-4163-a15f-628a1cc0d2de {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.383998] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c081801-8400-4e6d-8266-f164ee0dee78 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.392015] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0c93b9-4f06-4d57-8a1d-e0ca055a3423 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.407491] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2022.417027] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 
'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2022.431973] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2022.432199] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.321s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.433039] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.433039] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2024.433039] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2024.456744] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.456915] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457090] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457357] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457404] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457492] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Skipping network cache update for instance because it is Building. 
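Annotation: two sanity checks on the resource-tracker audit above. The "Final resource view" usage follows directly from ten instances each holding a {DISK_GB: 1, MEMORY_MB: 128, VCPU: 1} allocation plus the 512 MB RAM reserved in the inventory, and placement derives effective capacity from the reported inventory as (total - reserved) * allocation_ratio:

```python
# Usage side: ten instances at 1 GB disk / 128 MB RAM / 1 VCPU each,
# plus the 512 MB MEMORY_MB reservation for the host.
instances = 10
print(512 + instances * 128)   # 1792 -> used_ram=1792MB in the log
print(instances * 1)           # 10   -> used_disk=10GB and used_vcpus=10

# Capacity side: effective capacity per resource class, from the logged inventory.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 200,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
```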
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457615] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457733] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457851] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.457968] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 11af6076-e985-477c-98a6-437843b26b02] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2024.458101] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2026.027540] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.027965] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.027965] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... 
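Annotation: the recurring "Running periodic task ComputeManager._poll_*" lines come from oslo.service's periodic-task machinery. A self-contained example of the same pattern, assuming an illustrative class name and spacing (not Nova's actual configuration):

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    # run_immediately=True makes the task eligible on the first pass;
    # otherwise it waits out the spacing interval first.
    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _poll_volume_usage(self, context):
        print("Running periodic task _poll_volume_usage")

# In Nova a looping call drives this; here one pass suffices to fire the task.
Manager().run_periodic_tasks(context=None)
```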
{{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2027.028168] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2032.027884] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2033.023288] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2033.026929] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2033.027141] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2035.576067] env[62476]: WARNING oslo_vmware.rw_handles [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2035.576067] env[62476]: ERROR oslo_vmware.rw_handles [ 2035.576067] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2035.577522] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2035.577800] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Copying Virtual Disk [datastore1] vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/6d91d880-f907-464a-ae3a-bb79dee2364a/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2035.578139] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ce0eb8b-16b6-41cd-a384-0b573ca078a3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.586106] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Waiting for the task: (returnval){ [ 2035.586106] env[62476]: value = "task-4319189" [ 2035.586106] env[62476]: _type = "Task" [ 2035.586106] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.594256] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Task: {'id': task-4319189, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.096295] env[62476]: DEBUG oslo_vmware.exceptions [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2036.096295] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.096610] env[62476]: ERROR nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2036.096610] env[62476]: Faults: ['InvalidArgument'] [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Traceback (most recent call last): [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] yield resources [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self.driver.spawn(context, instance, image_meta, [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self._fetch_image_if_missing(context, vi) [ 2036.096610] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] image_cache(vi, tmp_image_ds_loc) [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] vm_util.copy_virtual_disk( [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] session._wait_for_task(vmdk_copy_task) [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] return self.wait_for_task(task_ref) [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] return evt.wait() [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] result = hub.switch() [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2036.097038] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] return self.greenlet.switch() [ 2036.097425] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2036.097425] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self.f(*self.args, **self.kw) [ 2036.097425] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2036.097425] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] raise exceptions.translate_fault(task_info.error) [ 2036.097425] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2036.097425] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Faults: ['InvalidArgument'] [ 2036.097425] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] [ 2036.097425] env[62476]: INFO nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Terminating instance [ 2036.098524] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.098737] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2036.098975] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f90341d0-d975-4ba0-bbd8-a676f39f6d5a {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.101009] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.101183] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquired lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.101348] env[62476]: DEBUG nova.network.neutron [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2036.112111] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2036.112298] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2036.114013] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0ed57d4-9b34-4f25-a185-ef3c03fdc747 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.119075] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 2036.119075] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52019599-d0a6-e3e0-f2d9-0ec46dfb6eb2" [ 2036.119075] env[62476]: _type = "Task" [ 2036.119075] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.128813] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52019599-d0a6-e3e0-f2d9-0ec46dfb6eb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.131991] env[62476]: DEBUG nova.network.neutron [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance cache missing network info. 
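Annotation: "Fault InvalidArgument not matched" above means oslo.vmware found no dedicated exception class for that VIM fault name and fell back to the generic VimFaultException, whose fault_list carries the fault names from the traceback ("Faults: ['InvalidArgument']"). A hedged sketch of a caller inspecting that; `session` and `task` are stand-ins for objects the surrounding code already holds:

```python
from oslo_vmware import exceptions as vexc

def wait_checked(session, task):
    try:
        return session.wait_for_task(task)
    except vexc.VimFaultException as e:
        if 'InvalidArgument' in e.fault_list:
            # e.g. the CopyVirtualDisk "A specified parameter was not
            # correct: fileType" failure that aborted the spawn above
            print("invalid argument fault: %s" % e)
        raise
```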
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2036.229606] env[62476]: DEBUG nova.network.neutron [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.239978] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Releasing lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.240455] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2036.240678] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2036.241802] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd0ed05-ecb8-4c25-85bc-8cd07b4f98e9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.250334] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2036.250583] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f874c339-af99-490c-93d0-affc21df1679 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.287453] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2036.287664] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2036.287867] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Deleting the datastore file [datastore1] c7e551af-a94e-48da-a725-53ebd73d43ee {{(pid=62476) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2036.288170] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df68a850-2ccc-4824-921f-8ad6d914fd98 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.294863] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Waiting for the task: (returnval){ [ 2036.294863] env[62476]: value = "task-4319191" [ 2036.294863] env[62476]: _type = "Task" [ 2036.294863] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.302930] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Task: {'id': task-4319191, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.630032] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2036.630032] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2036.630407] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2d44e7c-55b5-444d-ad16-8e1d8bb210a4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.643928] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2036.644142] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Fetch image to [datastore1] vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2036.644311] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 
2036.645100] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4338044-32dd-4522-9791-9baea413e4f2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.651996] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e698178-be1a-4dab-9406-1680603860e2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.661095] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eba5694-c85c-41ba-a599-71549da29078 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.693590] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1b41c2-b397-42ec-839e-0d7894d1fc3f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.700597] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d3a8d546-107c-4673-938c-f8b55e6f3e45 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.726022] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2036.776459] env[62476]: DEBUG oslo_vmware.rw_handles [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2036.837023] env[62476]: DEBUG oslo_vmware.rw_handles [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2036.837272] env[62476]: DEBUG oslo_vmware.rw_handles [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2036.841326] env[62476]: DEBUG oslo_vmware.api [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Task: {'id': task-4319191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102008} completed successfully. 
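Annotation: the wait_for_task / _poll_task pairs throughout this log (task-4319191 just completed above after one 0% poll) follow a simple poll-until-terminal loop. A control-flow sketch only; `get_task_info` stands in for the real PropertyCollector read of the task object, and this is not the oslo.vmware implementation:

```python
import time

def wait_for_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # oslo.vmware translates the fault into a VimException subclass here
            raise RuntimeError(info['error'])
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)

# Example: completes on the second poll, like task-4319191 above.
polls = iter([{'state': 'running', 'progress': 0},
              {'state': 'success', 'result': None}])
wait_for_task(lambda: next(polls), interval=0)
```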
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.841666] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2036.841895] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2036.842122] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2036.842345] env[62476]: INFO nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Took 0.60 seconds to destroy the instance on the hypervisor. [ 2036.842608] env[62476]: DEBUG oslo.service.loopingcall [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.842848] env[62476]: DEBUG nova.compute.manager [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2036.845203] env[62476]: DEBUG nova.compute.claims [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2036.845439] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.845676] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.035903] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a527fa82-3664-4122-96c1-b782aaa92c8e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.043689] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939b2f40-3ef4-40c0-813d-96b234481087 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.073306] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bd13ed-d253-4ba2-a826-5d178e2b7f0b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.080790] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9f56f1-5c8b-4c15-9faf-8489375cd26d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.095583] env[62476]: DEBUG nova.compute.provider_tree [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2037.104779] env[62476]: DEBUG nova.scheduler.client.report [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2037.119008] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.273s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.119543] env[62476]: ERROR nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2037.119543] env[62476]: Faults: ['InvalidArgument'] [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Traceback (most recent call last): [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self.driver.spawn(context, instance, image_meta, [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self._fetch_image_if_missing(context, vi) [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] image_cache(vi, tmp_image_ds_loc) [ 2037.119543] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] vm_util.copy_virtual_disk( [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] session._wait_for_task(vmdk_copy_task) [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] return self.wait_for_task(task_ref) [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] 
return evt.wait() [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] result = hub.switch() [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] return self.greenlet.switch() [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2037.119922] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] self.f(*self.args, **self.kw) [ 2037.120353] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2037.120353] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] raise exceptions.translate_fault(task_info.error) [ 2037.120353] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2037.120353] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Faults: ['InvalidArgument'] [ 2037.120353] env[62476]: ERROR nova.compute.manager [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] [ 2037.120353] env[62476]: DEBUG nova.compute.utils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2037.121712] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Build of instance c7e551af-a94e-48da-a725-53ebd73d43ee was re-scheduled: A specified parameter was not correct: fileType [ 2037.121712] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2037.122099] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2037.122319] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.122466] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 
tempest-ServersAaction247Test-405319489-project-member] Acquired lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.122629] env[62476]: DEBUG nova.network.neutron [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2037.147376] env[62476]: DEBUG nova.network.neutron [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2037.205295] env[62476]: DEBUG nova.network.neutron [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.214321] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Releasing lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.214600] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2037.214797] env[62476]: DEBUG nova.compute.manager [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Skipping network deallocation for instance since networking was not requested. 
{{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2037.307081] env[62476]: INFO nova.scheduler.client.report [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Deleted allocations for instance c7e551af-a94e-48da-a725-53ebd73d43ee [ 2037.329732] env[62476]: DEBUG oslo_concurrency.lockutils [None req-cee7f5cd-f8da-417f-bb02-a77268dbe67c tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 591.846s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.330903] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 396.005s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.331131] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "c7e551af-a94e-48da-a725-53ebd73d43ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.331330] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.331502] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.333714] env[62476]: INFO nova.compute.manager [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Terminating instance [ 2037.335477] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquiring lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.335790] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Acquired lock 
"refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.335988] env[62476]: DEBUG nova.network.neutron [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2037.350471] env[62476]: DEBUG nova.compute.manager [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2037.364339] env[62476]: DEBUG nova.network.neutron [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2037.410811] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.411089] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.412913] env[62476]: INFO nova.compute.claims [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2037.443195] env[62476]: DEBUG nova.network.neutron [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.451774] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Releasing lock "refresh_cache-c7e551af-a94e-48da-a725-53ebd73d43ee" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.452198] env[62476]: DEBUG nova.compute.manager [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2037.452397] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2037.452897] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53705379-82be-4d5c-80f1-f91732c1934d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.465258] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd7fb5c-520c-4b15-8191-4ca43bd09ea1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.496110] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c7e551af-a94e-48da-a725-53ebd73d43ee could not be found. [ 2037.496328] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2037.496506] env[62476]: INFO nova.compute.manager [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2037.496757] env[62476]: DEBUG oslo.service.loopingcall [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2037.499267] env[62476]: DEBUG nova.compute.manager [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2037.499371] env[62476]: DEBUG nova.network.neutron [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2037.519273] env[62476]: DEBUG nova.network.neutron [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2037.528115] env[62476]: DEBUG nova.network.neutron [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.538317] env[62476]: INFO nova.compute.manager [-] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] Took 0.04 seconds to deallocate network for instance. 
[ 2037.616294] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84812055-4c61-46dd-8dc7-89c5816fb3cc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.624782] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74dcf71b-e74f-4321-a3a4-5c3ca5a9a1ef {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.656920] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977c8729-80e3-4fa7-ad2d-f1672fd06168 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.659680] env[62476]: DEBUG oslo_concurrency.lockutils [None req-e1cc407c-f2b8-4b15-a0b0-9515b319e83a tempest-ServersAaction247Test-405319489 tempest-ServersAaction247Test-405319489-project-member] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.329s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.661175] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 223.213s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.661369] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: c7e551af-a94e-48da-a725-53ebd73d43ee] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2037.661543] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "c7e551af-a94e-48da-a725-53ebd73d43ee" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.667997] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae30533-b30a-4a93-875c-34dd848d5d0a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.684571] env[62476]: DEBUG nova.compute.provider_tree [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2037.695850] env[62476]: DEBUG nova.scheduler.client.report [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2037.713411] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.713703] env[62476]: DEBUG nova.compute.manager [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2037.751758] env[62476]: DEBUG nova.compute.utils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2037.753155] env[62476]: DEBUG nova.compute.manager [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Allocating IP information in the background. 
{{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2037.753358] env[62476]: DEBUG nova.network.neutron [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2037.763040] env[62476]: DEBUG nova.compute.manager [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2037.824147] env[62476]: DEBUG nova.policy [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51aa02bb178f49779e7ae4262db0bcf1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43c3514f06db4f73bb8107310a9e8d2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 2037.828529] env[62476]: DEBUG nova.compute.manager [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2037.854791] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2037.855052] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2037.855214] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2037.855397] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2037.855546] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2037.855694] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2037.855897] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2037.856073] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2037.856244] 
env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2037.856406] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2037.856576] env[62476]: DEBUG nova.virt.hardware [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2037.857494] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e15855-cf9a-4116-a102-e23dc30decac {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.866516] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e0663b-af4c-4d79-839c-4ef4034c4559 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.242093] env[62476]: DEBUG nova.network.neutron [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Successfully created port: 5ce25b57-38a6-4c48-818a-7017cdcf640c {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2038.864645] env[62476]: DEBUG nova.compute.manager [req-0bf46a88-2365-428f-8936-0c640ed17f63 req-edb6e097-089d-429b-b13d-30234d2ed06d service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Received event network-vif-plugged-5ce25b57-38a6-4c48-818a-7017cdcf640c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2038.864645] env[62476]: DEBUG oslo_concurrency.lockutils [req-0bf46a88-2365-428f-8936-0c640ed17f63 req-edb6e097-089d-429b-b13d-30234d2ed06d service nova] Acquiring lock "56a5da15-57da-4d4d-a359-d90b780f67e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.864645] env[62476]: DEBUG oslo_concurrency.lockutils [req-0bf46a88-2365-428f-8936-0c640ed17f63 req-edb6e097-089d-429b-b13d-30234d2ed06d service nova] Lock "56a5da15-57da-4d4d-a359-d90b780f67e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.864645] env[62476]: DEBUG oslo_concurrency.lockutils [req-0bf46a88-2365-428f-8936-0c640ed17f63 req-edb6e097-089d-429b-b13d-30234d2ed06d service nova] Lock "56a5da15-57da-4d4d-a359-d90b780f67e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2038.865185] env[62476]: DEBUG nova.compute.manager [req-0bf46a88-2365-428f-8936-0c640ed17f63 req-edb6e097-089d-429b-b13d-30234d2ed06d service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] No waiting events found dispatching network-vif-plugged-5ce25b57-38a6-4c48-818a-7017cdcf640c {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2038.865185] env[62476]: WARNING nova.compute.manager [req-0bf46a88-2365-428f-8936-0c640ed17f63 req-edb6e097-089d-429b-b13d-30234d2ed06d service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Received unexpected event network-vif-plugged-5ce25b57-38a6-4c48-818a-7017cdcf640c for instance with vm_state building and task_state spawning. [ 2038.935895] env[62476]: DEBUG nova.network.neutron [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Successfully updated port: 5ce25b57-38a6-4c48-818a-7017cdcf640c {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2038.950523] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "refresh_cache-56a5da15-57da-4d4d-a359-d90b780f67e4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.951238] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquired lock "refresh_cache-56a5da15-57da-4d4d-a359-d90b780f67e4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.951238] env[62476]: DEBUG nova.network.neutron [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2039.012142] env[62476]: DEBUG nova.network.neutron [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2039.221614] env[62476]: DEBUG nova.network.neutron [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Updating instance_info_cache with network_info: [{"id": "5ce25b57-38a6-4c48-818a-7017cdcf640c", "address": "fa:16:3e:50:16:14", "network": {"id": "f3c02f39-695a-4c59-8067-f729d5445329", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2118224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43c3514f06db4f73bb8107310a9e8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ce25b57-38", "ovs_interfaceid": "5ce25b57-38a6-4c48-818a-7017cdcf640c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.235618] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Releasing lock "refresh_cache-56a5da15-57da-4d4d-a359-d90b780f67e4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.235944] env[62476]: DEBUG nova.compute.manager [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Instance network_info: |[{"id": "5ce25b57-38a6-4c48-818a-7017cdcf640c", "address": "fa:16:3e:50:16:14", "network": {"id": "f3c02f39-695a-4c59-8067-f729d5445329", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2118224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43c3514f06db4f73bb8107310a9e8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ce25b57-38", "ovs_interfaceid": "5ce25b57-38a6-4c48-818a-7017cdcf640c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2039.236395] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:16:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ce25b57-38a6-4c48-818a-7017cdcf640c', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2039.245925] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Creating folder: Project (43c3514f06db4f73bb8107310a9e8d2e). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2039.245925] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9c2d542-8d25-41a8-adf5-bd98da701141 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.257176] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Created folder: Project (43c3514f06db4f73bb8107310a9e8d2e) in parent group-v849485. [ 2039.257375] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Creating folder: Instances. Parent ref: group-v849582. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2039.257628] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d2f4124-fc77-484d-b7f8-61099ec16aec {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.266656] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Created folder: Instances in parent group-v849582. [ 2039.266827] env[62476]: DEBUG oslo.service.loopingcall [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2039.266977] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2039.267162] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a41de7f-14f2-4f9a-a3fe-75d082aece88 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.288015] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2039.288015] env[62476]: value = "task-4319194" [ 2039.288015] env[62476]: _type = "Task" [ 2039.288015] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.296189] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319194, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.798384] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319194, 'name': CreateVM_Task, 'duration_secs': 0.312059} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.798649] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2039.799395] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.799658] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.800014] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2039.800285] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0efb9652-94f4-440d-86d5-a2b2b0e12807 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.804879] env[62476]: DEBUG oslo_vmware.api [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Waiting for the task: (returnval){ [ 2039.804879] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52767c9d-5fcf-de08-405b-e04323ec4bc2" [ 2039.804879] env[62476]: _type = "Task" [ 2039.804879] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2039.812485] env[62476]: DEBUG oslo_vmware.api [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52767c9d-5fcf-de08-405b-e04323ec4bc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2040.315125] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2040.315552] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2040.315610] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5159648b-abec-426b-945c-7efe9c612dc3 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2040.888752] env[62476]: DEBUG nova.compute.manager [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Received event network-changed-5ce25b57-38a6-4c48-818a-7017cdcf640c {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 2040.888979] env[62476]: DEBUG nova.compute.manager [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Refreshing instance network info cache due to event network-changed-5ce25b57-38a6-4c48-818a-7017cdcf640c. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 2040.889239] env[62476]: DEBUG oslo_concurrency.lockutils [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] Acquiring lock "refresh_cache-56a5da15-57da-4d4d-a359-d90b780f67e4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2040.889401] env[62476]: DEBUG oslo_concurrency.lockutils [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] Acquired lock "refresh_cache-56a5da15-57da-4d4d-a359-d90b780f67e4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2040.889611] env[62476]: DEBUG nova.network.neutron [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Refreshing network info cache for port 5ce25b57-38a6-4c48-818a-7017cdcf640c {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2041.464425] env[62476]: DEBUG nova.network.neutron [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Updated VIF entry in instance network info cache for port 5ce25b57-38a6-4c48-818a-7017cdcf640c. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 2041.464778] env[62476]: DEBUG nova.network.neutron [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Updating instance_info_cache with network_info: [{"id": "5ce25b57-38a6-4c48-818a-7017cdcf640c", "address": "fa:16:3e:50:16:14", "network": {"id": "f3c02f39-695a-4c59-8067-f729d5445329", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2118224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43c3514f06db4f73bb8107310a9e8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ce25b57-38", "ovs_interfaceid": "5ce25b57-38a6-4c48-818a-7017cdcf640c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2041.476132] env[62476]: DEBUG oslo_concurrency.lockutils [req-4e22a40c-95f0-4731-9e15-94ba8b21a5c1 req-dc990b7c-217f-4d68-9fc4-0a0122691438 service nova] Releasing lock "refresh_cache-56a5da15-57da-4d4d-a359-d90b780f67e4" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2042.023062] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2059.038253] env[62476]: DEBUG oslo_concurrency.lockutils [None req-eb480544-9680-43ae-a8ea-4bdd23969c74 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "8fdd45f2-0c21-461f-896e-698182bd5337" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2082.949672] env[62476]: WARNING oslo_vmware.rw_handles [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles response.begin()
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2082.949672] env[62476]: ERROR oslo_vmware.rw_handles
[ 2082.949672] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2082.952020] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2082.952291] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Copying Virtual Disk [datastore1] vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/29050c86-30a4-4c47-a2b6-24612259bb6b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2082.952576] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc086844-c2e5-4a52-8358-90e349679943 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2082.960479] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){
[ 2082.960479] env[62476]: value = "task-4319195"
[ 2082.960479] env[62476]: _type = "Task"
[ 2082.960479] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2082.969184] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2083.026943] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2083.038947] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2083.039172] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2083.039350] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2083.039501] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2083.040585] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beeac336-14f9-4ea9-b207-382e355a8c7c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.049208] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34592d4-dc11-402a-9cd4-5125fa939b85 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.063033] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d881f8c-0afb-4d4e-9f85-a08d5e030c00 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.069362] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa59e5b7-5a46-43c1-ab94-2194d865b416 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.832029] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180715MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2083.832220] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2083.832399] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2083.843168] env[62476]: DEBUG oslo_vmware.exceptions [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2083.844053] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2083.844053] env[62476]: ERROR nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2083.844053] env[62476]: Faults: ['InvalidArgument']
[ 2083.844053] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Traceback (most recent call last):
[ 2083.844053] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 2083.844053] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] yield resources
[ 2083.844053] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2083.844053] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self.driver.spawn(context, instance, image_meta,
[ 2083.844053] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2083.844053] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self._fetch_image_if_missing(context, vi)
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] image_cache(vi, tmp_image_ds_loc)
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] vm_util.copy_virtual_disk(
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] session._wait_for_task(vmdk_copy_task)
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] return self.wait_for_task(task_ref)
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] return evt.wait()
[ 2083.844476] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] result = hub.switch()
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] return self.greenlet.switch()
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self.f(*self.args, **self.kw)
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] raise exceptions.translate_fault(task_info.error)
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Faults: ['InvalidArgument']
[ 2083.844883] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889]
[ 2083.844883] env[62476]: INFO nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Terminating instance
[ 2083.845897] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2083.846119] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2083.846370] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca0ff446-de1e-4962-a233-4c6c00c4fdd6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.848877] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 2083.849082] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2083.849815] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c6a5b7-36a2-4b75-a6c5-e632279f025d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.856697] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2083.856885] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fb28f51-527e-4d0f-8bd0-7fcc08f5beb9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.859188] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2083.859375] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2083.860336] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6debce13-0b97-4a09-a594-7f6d982ad5a6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2083.865822] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){
[ 2083.865822] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52ec4264-6a4e-2c83-121e-78e4105cb662"
[ 2083.865822] env[62476]: _type = "Task"
[ 2083.865822] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2083.873655] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52ec4264-6a4e-2c83-121e-78e4105cb662, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2083.980569] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance ea606214-a34b-4972-8948-a6ff8c55b889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.980871] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.980871] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981126] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981126] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981760] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981760] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981760] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981760] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981986] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 56a5da15-57da-4d4d-a359-d90b780f67e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2083.981986] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2083.982078] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=100GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2083.997829] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing inventories for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}}
[ 2084.014498] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating ProviderTree inventory for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}}
[ 2084.014498] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 2084.026214] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing aggregate associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, aggregates: None {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}}
[ 2084.045524] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing trait associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}}
[ 2084.168600] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aad726a-8114-408f-bebb-9f4f9b44f5f1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.176759] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f25dfb-6cf7-4e5b-a6cf-bf009d9f7d59 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.206382] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15075b0-2f75-42e1-a7e7-e2fc0ccba13b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.213813] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ba431f-4b3e-4f58-a907-29280563a2ab {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.226907] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2084.235748] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 2084.250603] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2084.250804] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.418s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2084.377357] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2084.377617] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating directory with path [datastore1] vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2084.378833] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebad9432-7751-4a90-9a97-686286196af1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.413964] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Created directory with path [datastore1] vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2084.414229] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Fetch image to [datastore1] vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2084.414344] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2084.415210] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d29907-fdd0-4e93-8944-4d1e0fb48268 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.422371] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7985b455-5400-472c-af8a-3a1a3df6a980 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.431695] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f21139-f27a-4a25-966f-1e1d4d7d75ff {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.461605] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2491910b-a6a8-49c6-93fd-b2ede02f6031 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.467907] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4f6516bd-2fd3-4615-b7fc-0c7c82682c0f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.490193] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2084.542570] env[62476]: DEBUG oslo_vmware.rw_handles [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2084.602259] env[62476]: DEBUG oslo_vmware.rw_handles [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2084.602452] env[62476]: DEBUG oslo_vmware.rw_handles [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2084.943553] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2084.943827] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2084.944037] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleting the datastore file [datastore1] ea606214-a34b-4972-8948-a6ff8c55b889 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2084.944378] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7748264-868c-4953-b710-39525f863c26 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.950923] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){
[ 2084.950923] env[62476]: value = "task-4319197"
[ 2084.950923] env[62476]: _type = "Task"
[ 2084.950923] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2084.960573] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2085.460921] env[62476]: DEBUG oslo_vmware.api [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': task-4319197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105217} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2085.461368] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2085.461428] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2085.461560] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2085.461738] env[62476]: INFO nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Took 1.61 seconds to destroy the instance on the hypervisor.
[ 2085.464442] env[62476]: DEBUG nova.compute.claims [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2085.464627] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.464845] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.658307] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb658b3d-2836-433e-82fd-3102caafbd90 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.666381] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3e2b87-8344-45b9-9375-6236022c4ecc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.695102] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c318f13-734b-4980-aa99-4d53444293b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.702267] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193b9350-af10-459d-9052-6b78880afeb6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.715297] env[62476]: DEBUG nova.compute.provider_tree [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2085.723821] env[62476]: DEBUG nova.scheduler.client.report [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2085.736997] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.272s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.737547] env[62476]: ERROR nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2085.737547] env[62476]: Faults: ['InvalidArgument'] [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Traceback (most recent call last): [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self.driver.spawn(context, instance, image_meta, [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self._fetch_image_if_missing(context, vi) [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] image_cache(vi, tmp_image_ds_loc) [ 2085.737547] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] vm_util.copy_virtual_disk( [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] session._wait_for_task(vmdk_copy_task) [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] return self.wait_for_task(task_ref) [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] return evt.wait() [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] result = hub.switch() [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] return self.greenlet.switch() [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2085.737998] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] self.f(*self.args, **self.kw) [ 2085.738437] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2085.738437] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] raise exceptions.translate_fault(task_info.error) [ 2085.738437] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2085.738437] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Faults: ['InvalidArgument'] [ 2085.738437] env[62476]: ERROR nova.compute.manager [instance: ea606214-a34b-4972-8948-a6ff8c55b889] [ 2085.738437] env[62476]: DEBUG nova.compute.utils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2085.739567] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Build of instance ea606214-a34b-4972-8948-a6ff8c55b889 was re-scheduled: A specified parameter was not correct: fileType [ 2085.739567] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2085.739928] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2085.740110] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2085.740286] env[62476]: DEBUG nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2085.740448] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2086.098897] env[62476]: DEBUG nova.network.neutron [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2086.120318] env[62476]: INFO nova.compute.manager [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Took 0.38 seconds to deallocate network for instance. [ 2086.227484] env[62476]: INFO nova.scheduler.client.report [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Deleted allocations for instance ea606214-a34b-4972-8948-a6ff8c55b889 [ 2086.251346] env[62476]: DEBUG oslo_concurrency.lockutils [None req-35811110-408d-4e60-9f54-d80abe1e5cf8 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "ea606214-a34b-4972-8948-a6ff8c55b889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 617.625s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.251600] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "ea606214-a34b-4972-8948-a6ff8c55b889" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 421.122s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.251824] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquiring lock "ea606214-a34b-4972-8948-a6ff8c55b889-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.252049] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "ea606214-a34b-4972-8948-a6ff8c55b889-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.252228] env[62476]: 
DEBUG oslo_concurrency.lockutils [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "ea606214-a34b-4972-8948-a6ff8c55b889-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.254643] env[62476]: INFO nova.compute.manager [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Terminating instance [ 2086.256427] env[62476]: DEBUG nova.compute.manager [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2086.256623] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2086.257124] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e597ac26-d908-4893-83b9-3ba067456e6b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.266159] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1537c184-ea3a-4232-8461-77edeb2fc0ba {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.297636] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ea606214-a34b-4972-8948-a6ff8c55b889 could not be found. [ 2086.297770] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2086.297855] env[62476]: INFO nova.compute.manager [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2086.298135] env[62476]: DEBUG oslo.service.loopingcall [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2086.298389] env[62476]: DEBUG nova.compute.manager [-] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2086.299076] env[62476]: DEBUG nova.network.neutron [-] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2086.330321] env[62476]: DEBUG nova.network.neutron [-] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2086.339304] env[62476]: INFO nova.compute.manager [-] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] Took 0.04 seconds to deallocate network for instance. [ 2086.446674] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3f077be6-4049-4cb0-9fd3-01c1854f0447 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Lock "ea606214-a34b-4972-8948-a6ff8c55b889" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.195s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.447630] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "ea606214-a34b-4972-8948-a6ff8c55b889" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 271.999s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.447827] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: ea606214-a34b-4972-8948-a6ff8c55b889] During sync_power_state the instance has a pending task (deleting). Skip. [ 2086.448009] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "ea606214-a34b-4972-8948-a6ff8c55b889" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.251221] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.251621] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2087.252284] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2087.272442] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Skipping network cache update for instance because it is Building. 
[ 2087.272633] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.272854] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.272854] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.273109] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.273269] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.273510] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.273600] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 11af6076-e985-477c-98a6-437843b26b02] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.273772] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2087.273984] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2087.274554] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.274735] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.274892] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.275143] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2089.028720] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2089.029328] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances with incomplete migration {{(pid=62476) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2093.035722] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2093.036203] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2093.036203] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2095.028607] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2103.027648] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2103.028036] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2103.039563] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] There are 0 instances to clean {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}}
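Each "Running periodic task ComputeManager._..." entry above is oslo.service's periodic_task machinery iterating over decorated manager methods and skipping the ones not yet due. A minimal sketch with the real decorator and base class; the task body and spacing are illustrative, not Nova's:

    # Sketch: real oslo_service API, illustrative task.
    from oslo_config import cfg
    from oslo_service import periodic_task

    class ExampleManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=10)
        def _run_pending_deletes(self, context):
            # Nova's version logs "Cleaning up deleted instances" and
            # "There are N instances to clean" from a task like this.
            pass

    mgr = ExampleManager()
    mgr.run_periodic_tasks(context=None)  # a service loop calls this repeatedly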
[ 2104.239173] env[62476]: DEBUG oslo_concurrency.lockutils [None req-449b5f9a-1eb4-4430-adc1-9ac55ed1d894 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "11af6076-e985-477c-98a6-437843b26b02" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2109.028228] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2121.643262] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.643618] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.654735] env[62476]: DEBUG nova.compute.manager [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2121.715189] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.715459] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.716936] env[62476]: INFO nova.compute.claims [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2121.892019] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959b2e01-3618-4a16-acdd-7d50e574f850 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.899327] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94dfc9b7-1d38-4f40-a559-50ff0f48617d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.931560] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015bc810-1f4b-46f5-9cd9-38fa5510ce1f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.940063] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d240325-8377-4350-bdce-d67354c3f705 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.953586] env[62476]: DEBUG nova.compute.provider_tree [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2121.963571] env[62476]: DEBUG nova.scheduler.client.report [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2121.976959] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 
tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.261s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.977444] env[62476]: DEBUG nova.compute.manager [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2122.010636] env[62476]: DEBUG nova.compute.utils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2122.012317] env[62476]: DEBUG nova.compute.manager [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2122.012492] env[62476]: DEBUG nova.network.neutron [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2122.021956] env[62476]: DEBUG nova.compute.manager [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2122.081207] env[62476]: DEBUG nova.policy [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9281b2dcb9c0440495b676e3291d6d92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1bc32d84f43a439396eacf3e9da5ad7d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 2122.088144] env[62476]: DEBUG nova.compute.manager [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2122.114013] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2122.114313] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2122.114475] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2122.114656] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2122.114805] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2122.114955] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2122.115175] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2122.115338] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2122.115523] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 
tempest-ServersTestJSON-1286035361-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2122.115701] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2122.115878] env[62476]: DEBUG nova.virt.hardware [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2122.116812] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5063d828-0ccb-4f33-9bee-9aafbd006136 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.125371] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5998f9e0-20f6-49cf-ace5-fd430ed0bb23 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.439828] env[62476]: DEBUG nova.network.neutron [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Successfully created port: 94102ffd-34ad-43f3-95a9-716aee794dd1 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2123.036185] env[62476]: DEBUG nova.compute.manager [req-81c42ecd-b8c7-4ddd-8b7a-68660589b799 req-2bc2cd7e-50b0-402e-b595-157cfae75867 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Received event network-vif-plugged-94102ffd-34ad-43f3-95a9-716aee794dd1 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2123.036474] env[62476]: DEBUG oslo_concurrency.lockutils [req-81c42ecd-b8c7-4ddd-8b7a-68660589b799 req-2bc2cd7e-50b0-402e-b595-157cfae75867 service nova] Acquiring lock "63657e6f-8e2e-41e0-ad6e-2a13a90bf7de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.036637] env[62476]: DEBUG oslo_concurrency.lockutils [req-81c42ecd-b8c7-4ddd-8b7a-68660589b799 req-2bc2cd7e-50b0-402e-b595-157cfae75867 service nova] Lock "63657e6f-8e2e-41e0-ad6e-2a13a90bf7de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.036829] env[62476]: DEBUG oslo_concurrency.lockutils [req-81c42ecd-b8c7-4ddd-8b7a-68660589b799 req-2bc2cd7e-50b0-402e-b595-157cfae75867 service nova] Lock "63657e6f-8e2e-41e0-ad6e-2a13a90bf7de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.037033] env[62476]: DEBUG nova.compute.manager [req-81c42ecd-b8c7-4ddd-8b7a-68660589b799 req-2bc2cd7e-50b0-402e-b595-157cfae75867 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] No waiting events found dispatching network-vif-plugged-94102ffd-34ad-43f3-95a9-716aee794dd1 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
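The "No waiting events found" / "Received unexpected event" pair here is Nova's external-event handshake: the spawn path may register a waiter for network-vif-plugged before blocking, and the Neutron-driven event either releases that waiter or, as in this run, arrives with nobody waiting and is merely logged. The following is not Nova's implementation (Nova uses eventlet and the per-instance -events lock shown above), just the shape of the mechanism in a few lines:

    # Illustrative analogy only.
    import threading

    _waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        latch = threading.Event()
        _waiters[(instance_uuid, event_name)] = latch
        return latch  # the spawn path blocks on latch.wait(timeout=...)

    def dispatch(instance_uuid, event_name):
        latch = _waiters.pop((instance_uuid, event_name), None)
        if latch is None:
            print('Received unexpected event %s' % event_name)
        else:
            latch.set()  # wake the waiting spawn path

    dispatch('63657e6f-8e2e-41e0-ad6e-2a13a90bf7de',
             'network-vif-plugged-94102ffd-34ad-43f3-95a9-716aee794dd1')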
[ 2123.037266] env[62476]: WARNING nova.compute.manager [req-81c42ecd-b8c7-4ddd-8b7a-68660589b799 req-2bc2cd7e-50b0-402e-b595-157cfae75867 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Received unexpected event network-vif-plugged-94102ffd-34ad-43f3-95a9-716aee794dd1 for instance with vm_state building and task_state spawning. [ 2123.198610] env[62476]: DEBUG nova.network.neutron [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Successfully updated port: 94102ffd-34ad-43f3-95a9-716aee794dd1 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2123.216926] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "refresh_cache-63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2123.216926] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "refresh_cache-63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2123.217311] env[62476]: DEBUG nova.network.neutron [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2123.263749] env[62476]: DEBUG nova.network.neutron [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2123.692543] env[62476]: DEBUG nova.network.neutron [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Updating instance_info_cache with network_info: [{"id": "94102ffd-34ad-43f3-95a9-716aee794dd1", "address": "fa:16:3e:39:d4:0b", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94102ffd-34", "ovs_interfaceid": "94102ffd-34ad-43f3-95a9-716aee794dd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.704609] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "refresh_cache-63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2123.704899] env[62476]: DEBUG nova.compute.manager [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Instance network_info: |[{"id": "94102ffd-34ad-43f3-95a9-716aee794dd1", "address": "fa:16:3e:39:d4:0b", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94102ffd-34", "ovs_interfaceid": "94102ffd-34ad-43f3-95a9-716aee794dd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
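The instance_info_cache entry just logged is a plain JSON list of VIF dicts, so everything the VMware driver needs later (MAC address, fixed IPs, the NSX logical-switch id that becomes the OpaqueNetwork reference) is a dictionary walk away. An abbreviated sketch over the structure above, keeping only the fields actually used:

    # Fields abbreviated from the network_info entry logged above.
    network_info = [{
        "id": "94102ffd-34ad-43f3-95a9-716aee794dd1",
        "address": "fa:16:3e:39:d4:0b",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.14"}]}]},
        "details": {"nsx-logical-switch-id":
                    "39a4aca0-934b-4a91-8779-6a4360c3f967"},
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], fixed_ips,
              vif["details"]["nsx-logical-switch-id"])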
[ 2123.705327] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:d4:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94102ffd-34ad-43f3-95a9-716aee794dd1', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2123.712861] env[62476]: DEBUG oslo.service.loopingcall [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2123.713372] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2123.713647] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9352579-2e18-4018-aa8d-642679b16570 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.735792] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2123.735792] env[62476]: value = "task-4319198" [ 2123.735792] env[62476]: _type = "Task" [ 2123.735792] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.744053] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319198, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.246834] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319198, 'name': CreateVM_Task} progress is 99%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.747472] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319198, 'name': CreateVM_Task, 'duration_secs': 0.550335} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.747650] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2124.748363] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.748533] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.748950] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2124.749127] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-173dd585-b0b4-4501-bd62-a5cf4c35c3f2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.754155] env[62476]: DEBUG oslo_vmware.api [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 2124.754155] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]525672f9-7593-ab16-4f07-10c3b23f97da" [ 2124.754155] env[62476]: _type = "Task" [ 2124.754155] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.762095] env[62476]: DEBUG oslo_vmware.api [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]525672f9-7593-ab16-4f07-10c3b23f97da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.062135] env[62476]: DEBUG nova.compute.manager [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Received event network-changed-94102ffd-34ad-43f3-95a9-716aee794dd1 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2125.062282] env[62476]: DEBUG nova.compute.manager [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Refreshing instance network info cache due to event network-changed-94102ffd-34ad-43f3-95a9-716aee794dd1. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2125.062496] env[62476]: DEBUG oslo_concurrency.lockutils [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] Acquiring lock "refresh_cache-63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.062645] env[62476]: DEBUG oslo_concurrency.lockutils [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] Acquired lock "refresh_cache-63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.062808] env[62476]: DEBUG nova.network.neutron [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Refreshing network info cache for port 94102ffd-34ad-43f3-95a9-716aee794dd1 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2125.264954] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2125.265330] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2125.265406] env[62476]: DEBUG oslo_concurrency.lockutils [None req-9d400242-8bfe-4cd2-ad1d-abf7219463d2 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.331267] env[62476]: DEBUG nova.network.neutron [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Updated VIF entry in instance network info cache for port 94102ffd-34ad-43f3-95a9-716aee794dd1. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2125.331704] env[62476]: DEBUG nova.network.neutron [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Updating instance_info_cache with network_info: [{"id": "94102ffd-34ad-43f3-95a9-716aee794dd1", "address": "fa:16:3e:39:d4:0b", "network": {"id": "87b70a9c-f6b7-4743-911a-9e2d0a108ca8", "bridge": "br-int", "label": "tempest-ServersTestJSON-742676103-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bc32d84f43a439396eacf3e9da5ad7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94102ffd-34", "ovs_interfaceid": "94102ffd-34ad-43f3-95a9-716aee794dd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.341597] env[62476]: DEBUG oslo_concurrency.lockutils [req-88575417-be80-4761-845e-21b8b4220d96 req-7a108be7-dae8-4e5f-828a-08f5532445f2 service nova] Releasing lock "refresh_cache-63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.299136] env[62476]: WARNING oslo_vmware.rw_handles [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2130.299136] env[62476]: ERROR oslo_vmware.rw_handles [ 2130.299940] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-27c1df13-49ef-4027-884e-902a2e8d5608 
tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2130.301594] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2130.301840] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Copying Virtual Disk [datastore1] vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/b5004349-081e-4e78-a3e0-41cccfaca9b7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2130.302143] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1dc7da5-3e77-4b79-9cf0-9a532f827609 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.310795] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 2130.310795] env[62476]: value = "task-4319199" [ 2130.310795] env[62476]: _type = "Task" [ 2130.310795] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.319398] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.821554] env[62476]: DEBUG oslo_vmware.exceptions [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2130.821855] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.822433] env[62476]: ERROR nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2130.822433] env[62476]: Faults: ['InvalidArgument'] [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Traceback (most recent call last): [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] yield resources [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self.driver.spawn(context, instance, image_meta, [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self._fetch_image_if_missing(context, vi) [ 2130.822433] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] image_cache(vi, tmp_image_ds_loc) [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] vm_util.copy_virtual_disk( [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] session._wait_for_task(vmdk_copy_task) [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] return self.wait_for_task(task_ref) [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] return evt.wait() [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] result = hub.switch() [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2130.822907] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] return self.greenlet.switch() [ 2130.823381] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2130.823381] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self.f(*self.args, **self.kw) [ 2130.823381] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2130.823381] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] raise exceptions.translate_fault(task_info.error) [ 2130.823381] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2130.823381] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Faults: ['InvalidArgument'] [ 2130.823381] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] [ 2130.823381] env[62476]: INFO nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Terminating instance [ 2130.824352] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.824557] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2130.824826] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72a6686a-db17-4587-a45c-0ef96407258c {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.827251] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2130.827449] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2130.828254] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84893c91-48bf-4c48-8596-f06b07b8af87 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.835271] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2130.835495] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46befcba-d751-48d5-a99e-fbe8640ebdf4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.837892] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2130.838161] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2130.839150] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b0a5dff-6049-4e45-a0c0-d1cce6458cad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.844673] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 2130.844673] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52d70025-c961-b07d-c377-543f5a65235d" [ 2130.844673] env[62476]: _type = "Task" [ 2130.844673] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.853203] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52d70025-c961-b07d-c377-543f5a65235d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.357262] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2131.357615] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating directory with path [datastore1] vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2131.357736] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f309adfc-0220-40ed-a175-5f220c0db561 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.380447] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Created directory with path [datastore1] vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2131.380691] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Fetch image to [datastore1] vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2131.380848] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2131.381693] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed563c03-d37c-47aa-9912-af3deb460dc3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.390207] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234fa901-a074-41b6-b5f2-9007d760b91a {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.400921] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdd8853-b07c-4210-8b6e-e29843b04028 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.433506] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311c6d74-3a76-4266-86d7-40ef4f8f9938 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.440321] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e77501a5-9290-449c-a212-634e4afaa6d2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.463384] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2131.621374] env[62476]: DEBUG oslo_vmware.rw_handles [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2131.681337] env[62476]: DEBUG oslo_vmware.rw_handles [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2131.681552] env[62476]: DEBUG oslo_vmware.rw_handles [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
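The rw_handles entries above stream the Glance image straight onto the datastore through the ESX host's /folder HTTP endpoint, addressing the target file with dcPath and dsName query parameters. A roughly equivalent standalone sketch with requests; note that oslo.vmware actually uses a raw HTTPSConnection authenticated by the generic service ticket acquired just before, so the local source file and TLS handling here are placeholders:

    import requests

    url = ("https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/"
           "vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/"
           "3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk")
    params = {"dcPath": "ha-datacenter", "dsName": "datastore1"}

    with open("tmp-sparse.vmdk", "rb") as src:  # placeholder source file
        resp = requests.put(url, params=params, data=src,
                            verify=False)  # lab CA here; verify certs in production
    resp.raise_for_status()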
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2132.310253] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "9497c622-7f14-4fc2-ac24-d611897a8be9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.370724] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2132.371123] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2132.371123] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleting the datastore file [datastore1] 1d67c106-ced2-4b4e-8abd-1652bd0509d1 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2132.371391] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d058d32-a878-4f29-8594-b156d2607d0f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.379022] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 2132.379022] env[62476]: value = "task-4319201" [ 2132.379022] env[62476]: _type = "Task" [ 2132.379022] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.387306] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319201, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.889473] env[62476]: DEBUG oslo_vmware.api [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': task-4319201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077028} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.889635] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2132.889818] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2132.889993] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2132.890184] env[62476]: INFO nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Took 2.06 seconds to destroy the instance on the hypervisor. [ 2132.894054] env[62476]: DEBUG nova.compute.claims [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2132.894054] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.894054] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.067951] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1aa9dc-c6b5-4004-8c3b-d55bc4fc7295 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.076461] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ff1d25-2cb2-4f3a-b72d-da4bd3d5be60 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.106756] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a20d01a-0785-4701-b4a7-37c6943ea93a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.115107] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0097da45-79e7-45a9-b29b-c33bd5f91d00 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.130127] env[62476]: DEBUG nova.compute.provider_tree [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2133.141398] env[62476]: DEBUG nova.scheduler.client.report [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2133.155601] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.262s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.156214] env[62476]: ERROR nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2133.156214] env[62476]: Faults: ['InvalidArgument'] [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Traceback (most recent call last): [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self.driver.spawn(context, instance, image_meta, [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self._fetch_image_if_missing(context, vi) [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 
1d67c106-ced2-4b4e-8abd-1652bd0509d1] image_cache(vi, tmp_image_ds_loc) [ 2133.156214] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] vm_util.copy_virtual_disk( [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] session._wait_for_task(vmdk_copy_task) [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] return self.wait_for_task(task_ref) [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] return evt.wait() [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] result = hub.switch() [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] return self.greenlet.switch() [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2133.156638] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] self.f(*self.args, **self.kw) [ 2133.157092] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2133.157092] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] raise exceptions.translate_fault(task_info.error) [ 2133.157092] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2133.157092] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Faults: ['InvalidArgument'] [ 2133.157092] env[62476]: ERROR nova.compute.manager [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] [ 2133.157092] env[62476]: DEBUG nova.compute.utils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2133.159108] env[62476]: DEBUG 
nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Build of instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 was re-scheduled: A specified parameter was not correct: fileType [ 2133.159108] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2133.159502] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2133.159677] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2133.159853] env[62476]: DEBUG nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2133.160034] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2133.552901] env[62476]: DEBUG nova.network.neutron [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.569763] env[62476]: INFO nova.compute.manager [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Took 0.41 seconds to deallocate network for instance. 
[ 2133.707295] env[62476]: INFO nova.scheduler.client.report [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Deleted allocations for instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 [ 2133.734570] env[62476]: DEBUG oslo_concurrency.lockutils [None req-27c1df13-49ef-4027-884e-902a2e8d5608 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 619.646s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.735774] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 423.526s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.736494] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.736494] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.736962] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.741076] env[62476]: INFO nova.compute.manager [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Terminating instance [ 2133.742218] env[62476]: DEBUG nova.compute.manager [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2133.742567] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2133.743236] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-456b786d-e48f-4d50-9265-0ba907fa100d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.755137] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cf8b95-f877-4375-8ac2-5dccd5e8ba15 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.788042] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1d67c106-ced2-4b4e-8abd-1652bd0509d1 could not be found. [ 2133.788500] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2133.789180] env[62476]: INFO nova.compute.manager [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2133.789611] env[62476]: DEBUG oslo.service.loopingcall [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2133.789990] env[62476]: DEBUG nova.compute.manager [-] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2133.790230] env[62476]: DEBUG nova.network.neutron [-] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2133.834583] env[62476]: DEBUG nova.network.neutron [-] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.849234] env[62476]: INFO nova.compute.manager [-] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] Took 0.06 seconds to deallocate network for instance. 
[ 2133.944126] env[62476]: DEBUG oslo_concurrency.lockutils [None req-4f1ee34c-a376-440d-bf7e-6e8136a8abbc tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.209s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.945220] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 319.496s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.945428] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 1d67c106-ced2-4b4e-8abd-1652bd0509d1] During sync_power_state the instance has a pending task (deleting). Skip. [ 2133.945605] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "1d67c106-ced2-4b4e-8abd-1652bd0509d1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.035141] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.047597] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.047830] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.048016] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.048188] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2144.049348] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98924375-c06f-4c6e-b9a8-52de13ea0830 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.058601] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b959670a-d210-4987-9402-11c0c864983b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.072967] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093bb5a5-e095-46dc-a86b-553181a0b11a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.079880] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d0185e-cccd-4f1d-89d8-2b882d0a7e23 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.109978] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180663MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2144.110197] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.110339] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.182983] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance e6b815fb-fa2d-4797-8810-c2b891f375cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.183171] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.183300] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.183561] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.183561] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.183670] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.183839] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.183979] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 56a5da15-57da-4d4d-a359-d90b780f67e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.184121] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.184324] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2144.184464] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=100GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2144.299078] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67de62da-c244-4937-a691-8a7887d0b0a4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.307332] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2d15e2-8735-4fe1-a510-c9ede6cc765d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.336674] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51517339-1c5f-41b3-834b-a576d6d4078b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.344352] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4112605a-5297-4bf1-89ac-cd25ecbc29db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.358934] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2144.370069] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2144.404236] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2144.404454] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.294s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.396866] env[62476]: DEBUG oslo_service.periodic_task [None 
req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.027419] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.027621] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2148.027671] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2148.047616] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.047791] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.047892] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.048031] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.048186] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.048309] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.048434] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 11af6076-e985-477c-98a6-437843b26b02] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.048551] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.048669] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2148.048783] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2148.049265] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.049442] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.049574] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2153.027910] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2153.028275] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.028948] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2157.028025] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.023585] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.244260] env[62476]: WARNING oslo_vmware.rw_handles [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2178.244260] env[62476]: ERROR oslo_vmware.rw_handles [ 2178.244260] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2178.246041] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2178.246570] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Copying Virtual Disk [datastore1] vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/2a18739b-8485-4b86-8de7-aecdc2275ba4/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2178.247113] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38ea554f-79c3-42ef-9ec3-a71005e9818f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.256916] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 2178.256916] env[62476]: value = "task-4319202" [ 2178.256916] env[62476]: _type = "Task" [ 2178.256916] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.265845] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319202, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.770776] env[62476]: DEBUG oslo_vmware.exceptions [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2178.771084] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.771671] env[62476]: ERROR nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2178.771671] env[62476]: Faults: ['InvalidArgument'] [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Traceback (most recent call last): [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] yield resources [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self.driver.spawn(context, instance, image_meta, [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self._fetch_image_if_missing(context, vi) [ 2178.771671] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] image_cache(vi, tmp_image_ds_loc) [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] vm_util.copy_virtual_disk( [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] session._wait_for_task(vmdk_copy_task) [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] return self.wait_for_task(task_ref) [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] return evt.wait() [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] result = hub.switch() [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2178.772149] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] return self.greenlet.switch() [ 2178.772643] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2178.772643] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self.f(*self.args, **self.kw) [ 2178.772643] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2178.772643] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] raise exceptions.translate_fault(task_info.error) [ 2178.772643] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2178.772643] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Faults: ['InvalidArgument'] [ 2178.772643] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] [ 2178.772643] env[62476]: INFO nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Terminating instance [ 2178.773643] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2178.773869] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 
tempest-ServersTestJSON-1286035361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2178.774139] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27f64d99-6651-4584-be2e-5581a1dd5de5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.776361] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2178.776579] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2178.777328] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f7a52e-0ec7-44ac-ad23-c26279e2dc55 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.784347] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2178.784544] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aeae4aad-25f4-43fa-96cb-30500bb87bb8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.786730] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2178.786900] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2178.787927] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd9c88aa-d5b1-4cdc-a81a-cf2edd401e92 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.793048] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 2178.793048] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]526b86e7-145e-3268-0894-1dbd4e32b9ea" [ 2178.793048] env[62476]: _type = "Task" [ 2178.793048] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.800577] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]526b86e7-145e-3268-0894-1dbd4e32b9ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.850928] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2178.851197] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2178.851339] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleting the datastore file [datastore1] e6b815fb-fa2d-4797-8810-c2b891f375cf {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2178.851618] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0ff8b91-e73c-4e1d-aee9-791c3d0dd987 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.857682] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for the task: (returnval){ [ 2178.857682] env[62476]: value = "task-4319204" [ 2178.857682] env[62476]: _type = "Task" [ 2178.857682] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.865529] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.303388] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2179.303776] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Creating directory with path [datastore1] vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2179.303874] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a4df55d-8c05-4062-b1ed-1b8b0471d0a0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.316763] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Created directory with path [datastore1] vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2179.316949] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Fetch image to [datastore1] vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2179.317158] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2179.317930] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8268d957-a0cb-45ce-93ef-644f10f514fb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.324412] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c44c98e-d4b6-44c7-a179-7120873bf6e8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.333333] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e2a984-ccf4-4899-86b1-932254ad2b1a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.366747] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23d0b1c-8e8a-44a1-8b0c-906568a273d7 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.374143] env[62476]: DEBUG oslo_vmware.api [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Task: {'id': task-4319204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066508} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.375620] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2179.375810] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2179.375983] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2179.376174] env[62476]: INFO nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2179.378306] env[62476]: DEBUG nova.compute.claims [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2179.378481] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.378742] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.381470] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1d6c00b5-55b7-4f67-8fa7-aded1e8cca4c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.402580] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2179.459530] env[62476]: DEBUG oslo_vmware.rw_handles [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2179.521579] env[62476]: DEBUG oslo_vmware.rw_handles [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2179.521865] env[62476]: DEBUG oslo_vmware.rw_handles [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2179.620308] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042d21ff-30ff-4eab-a8b7-228309dd3715 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.628581] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cee578-ce30-4fed-b870-bbbea3958d9d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.660801] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8060e2ce-7bfb-48d9-bf10-2b4c042b4157 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.669096] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee190fb-c7ab-476b-bd81-258642575ff3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.683174] env[62476]: DEBUG nova.compute.provider_tree [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2179.694345] env[62476]: DEBUG nova.scheduler.client.report [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2179.708911] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.709468] env[62476]: ERROR nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.709468] env[62476]: Faults: ['InvalidArgument'] [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Traceback (most recent call last): [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2179.709468] 
env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self.driver.spawn(context, instance, image_meta, [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self._fetch_image_if_missing(context, vi) [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] image_cache(vi, tmp_image_ds_loc) [ 2179.709468] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] vm_util.copy_virtual_disk( [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] session._wait_for_task(vmdk_copy_task) [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] return self.wait_for_task(task_ref) [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] return evt.wait() [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] result = hub.switch() [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] return self.greenlet.switch() [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2179.709897] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] self.f(*self.args, **self.kw) [ 2179.710312] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2179.710312] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] raise exceptions.translate_fault(task_info.error) [ 2179.710312] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.710312] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Faults: ['InvalidArgument'] [ 2179.710312] env[62476]: ERROR nova.compute.manager [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] [ 2179.710312] env[62476]: DEBUG nova.compute.utils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2179.711752] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Build of instance e6b815fb-fa2d-4797-8810-c2b891f375cf was re-scheduled: A specified parameter was not correct: fileType [ 2179.711752] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2179.712132] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2179.712310] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2179.712486] env[62476]: DEBUG nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2179.712649] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2180.076696] env[62476]: DEBUG nova.network.neutron [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.090896] env[62476]: INFO nova.compute.manager [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Took 0.38 seconds to deallocate network for instance. [ 2180.194736] env[62476]: INFO nova.scheduler.client.report [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Deleted allocations for instance e6b815fb-fa2d-4797-8810-c2b891f375cf [ 2180.218356] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f9c194cc-39d8-4642-95f8-bcdef1204d0d tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 557.313s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.218562] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 365.770s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.218746] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2180.218950] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.219556] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 361.379s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.219817] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Acquiring lock "e6b815fb-fa2d-4797-8810-c2b891f375cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2180.220049] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.220224] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.222344] env[62476]: INFO nova.compute.manager [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Terminating instance [ 2180.224330] env[62476]: DEBUG nova.compute.manager [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2180.224540] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2180.224852] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54f4fea6-a405-436d-a5a4-b9c6d68e48c9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.234747] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91cb953f-32ac-4d08-916d-299478999667 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.264297] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e6b815fb-fa2d-4797-8810-c2b891f375cf could not be found. [ 2180.264511] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2180.264696] env[62476]: INFO nova.compute.manager [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2180.264953] env[62476]: DEBUG oslo.service.loopingcall [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2180.265192] env[62476]: DEBUG nova.compute.manager [-] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2180.265292] env[62476]: DEBUG nova.network.neutron [-] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2180.292435] env[62476]: DEBUG nova.network.neutron [-] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.300891] env[62476]: INFO nova.compute.manager [-] [instance: e6b815fb-fa2d-4797-8810-c2b891f375cf] Took 0.04 seconds to deallocate network for instance. 
[ 2180.389876] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5bfeb01f-7edf-409c-bbd5-1438a28cc240 tempest-AttachInterfacesTestJSON-1430345107 tempest-AttachInterfacesTestJSON-1430345107-project-member] Lock "e6b815fb-fa2d-4797-8810-c2b891f375cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.027884] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.039809] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.040051] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.040228] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.040389] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2205.041547] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720e226c-f596-4e4d-8bb2-966afc41b7c3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.050736] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08bae43-4885-4af5-bad5-18fbe49754a8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.064808] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc3827f-a00b-49eb-b60f-522a3fcb6103 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.071618] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cb1210-7eeb-406d-862a-c3e6cd6ddfca {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.101348] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180690MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2205.101505] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.101701] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.178731] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.178896] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.179032] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.179158] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.179282] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.179400] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.179518] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 56a5da15-57da-4d4d-a359-d90b780f67e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.179636] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2205.179827] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2205.179964] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=100GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2205.286965] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278a420a-348d-4786-85e2-9aa5605ae863 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.294847] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4d4e1f-e266-4af3-8c97-d37c7f419d3d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.324811] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23070186-7c80-43df-9345-c54cbb3669db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.332819] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f53bd6c-8e8a-4718-8f05-40c0815f8b3e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.346214] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2205.354713] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2205.370573] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2205.370743] 
env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.269s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.370266] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.026909] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.027187] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2210.027268] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2210.047050] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.047255] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.047349] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.047468] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.047606] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.047778] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 11af6076-e985-477c-98a6-437843b26b02] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.047913] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.048046] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2210.048172] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2210.048687] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.048865] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.049026] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2213.045537] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2215.028202] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2216.897653] env[62476]: DEBUG oslo_concurrency.lockutils [None req-93013b54-ceed-4bab-9695-0a1cc2236693 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "56a5da15-57da-4d4d-a359-d90b780f67e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.026995] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2218.027133] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2228.264616] env[62476]: WARNING oslo_vmware.rw_handles [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call 
last): [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2228.264616] env[62476]: ERROR oslo_vmware.rw_handles [ 2228.265292] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2228.267060] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2228.267336] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Copying Virtual Disk [datastore1] vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/c21f61f8-0235-4a10-8db8-b723de7d4de2/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2228.267631] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d810b1e0-8a05-4184-a1d4-02454b454691 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.277834] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 2228.277834] env[62476]: value = "task-4319205" [ 2228.277834] env[62476]: _type = "Task" [ 2228.277834] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.286349] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': task-4319205, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.788571] env[62476]: DEBUG oslo_vmware.exceptions [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2228.788877] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2228.789466] env[62476]: ERROR nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2228.789466] env[62476]: Faults: ['InvalidArgument'] [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Traceback (most recent call last): [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] yield resources [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self.driver.spawn(context, instance, image_meta, [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self._fetch_image_if_missing(context, vi) [ 2228.789466] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] image_cache(vi, tmp_image_ds_loc) [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] vm_util.copy_virtual_disk( [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in 
copy_virtual_disk [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] session._wait_for_task(vmdk_copy_task) [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] return self.wait_for_task(task_ref) [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] return evt.wait() [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] result = hub.switch() [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2228.789749] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] return self.greenlet.switch() [ 2228.790092] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2228.790092] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self.f(*self.args, **self.kw) [ 2228.790092] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2228.790092] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] raise exceptions.translate_fault(task_info.error) [ 2228.790092] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2228.790092] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Faults: ['InvalidArgument'] [ 2228.790092] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] [ 2228.790092] env[62476]: INFO nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Terminating instance [ 2228.791378] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2228.791584] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating directory 
with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2228.791835] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f386cb0-c142-4138-b679-65f15b4db9df {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.794102] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2228.794311] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2228.795122] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526617d0-a63c-4f87-902e-c116b9695a06 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.802492] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2228.802745] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bfaa633-07c9-491b-b3b8-1da202a416b0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.805157] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2228.805337] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2228.806316] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a46583b1-6ed2-4ef5-b901-adba4de24d8f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.811427] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){ [ 2228.811427] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52107a69-2b11-474c-af7a-d98490613a37" [ 2228.811427] env[62476]: _type = "Task" [ 2228.811427] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.819052] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52107a69-2b11-474c-af7a-d98490613a37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.876099] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2228.876099] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2228.876099] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Deleting the datastore file [datastore1] cc61313f-d7db-4c5d-bb8e-1e516d2a89ce {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2228.876453] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-304f6aae-f186-42c9-aafd-5cc6658f7d3a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.883849] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for the task: (returnval){ [ 2228.883849] env[62476]: value = "task-4319207" [ 2228.883849] env[62476]: _type = "Task" [ 2228.883849] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.892656] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': task-4319207, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.322411] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2229.322762] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating directory with path [datastore1] vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2229.322918] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b42fffd-8512-42c4-a4d4-71943a4d495c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.335635] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Created directory with path [datastore1] vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2229.335837] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Fetch image to [datastore1] vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2229.335994] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2229.336797] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d6af33-20ad-4ba1-b0e2-07d8f95e27f0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.343747] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53a5899-605f-4163-906c-754b9e1a3eae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.352803] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00613654-72c6-419d-8499-c35799f7db85 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.383749] env[62476]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11892417-c3a8-45e2-a2d7-88bb2d064ae4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.394739] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ef0a7838-8fef-442b-897b-7531cec18139 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.396399] env[62476]: DEBUG oslo_vmware.api [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Task: {'id': task-4319207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07351} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.396632] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2229.396825] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2229.397034] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2229.397246] env[62476]: INFO nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Took 0.60 seconds to destroy the instance on the hypervisor. 
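Annotation: the "Waiting for the task: (returnval){ ... }" blocks above are oslo.vmware's task-polling protocol: wait_for_task starts a looping call that re-reads the vCenter task's info until it reaches a terminal state, logging "progress is N%" on each pass and raising a translated fault if the task errors. A minimal sketch of that loop follows, with get_task_info() standing in for the PropertyCollector read that oslo.vmware actually performs; the names here are illustrative assumptions, not the library's API.

    import time

    POLL_INTERVAL = 0.5  # assumed polling period; oslo.vmware drives this
                         # from a looping call rather than a plain sleep

    class TaskFailed(Exception):
        """The vCenter task ended in an error state."""

    def wait_for_task(get_task_info, task_ref):
        """Poll a vCenter task reference until it succeeds or fails."""
        while True:
            info = get_task_info(task_ref)   # one poll, as in _poll_task above
            if info.state == 'success':
                return info                  # e.g. DeleteDatastoreFile_Task
            if info.state == 'error':
                # oslo.vmware translates the fault before raising; this is
                # how the VimFaultException tracebacks in this log surface.
                raise TaskFailed(info.error)
            time.sleep(POLL_INTERVAL)        # then log "progress is N%"

On success the task result is handed back to the caller, which is the "completed successfully" line for task-4319207 above; on error the translated fault propagates up through session._wait_for_task into the compute manager.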
[ 2229.399402] env[62476]: DEBUG nova.compute.claims [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2229.399586] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.399801] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.422120] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2229.561985] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2229.619669] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cc0150-7ffe-4501-8d24-4bc82ca246bb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.624369] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2229.624499] env[62476]: DEBUG oslo_vmware.rw_handles [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2229.628869] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5d50af-a016-455b-80b5-875d5b2628d1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.658993] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277a4f12-5bbd-4193-b782-b4e443f83807 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.666425] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab4b171-25c0-4114-bb16-47094805f880 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.680656] env[62476]: DEBUG nova.compute.provider_tree [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2229.691252] env[62476]: DEBUG nova.scheduler.client.report [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2229.705421] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.705976] env[62476]: ERROR nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2229.705976] env[62476]: Faults: ['InvalidArgument'] [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Traceback (most recent call last): [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self.driver.spawn(context, instance, image_meta, [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2229.705976] env[62476]: ERROR 
nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self._fetch_image_if_missing(context, vi) [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] image_cache(vi, tmp_image_ds_loc) [ 2229.705976] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] vm_util.copy_virtual_disk( [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] session._wait_for_task(vmdk_copy_task) [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] return self.wait_for_task(task_ref) [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] return evt.wait() [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] result = hub.switch() [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] return self.greenlet.switch() [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2229.706468] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] self.f(*self.args, **self.kw) [ 2229.706982] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2229.706982] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] raise exceptions.translate_fault(task_info.error) [ 2229.706982] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2229.706982] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Faults: ['InvalidArgument'] [ 2229.706982] env[62476]: ERROR nova.compute.manager [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] [ 2229.706982] env[62476]: DEBUG nova.compute.utils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2229.708254] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Build of instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce was re-scheduled: A specified parameter was not correct: fileType [ 2229.708254] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2229.708644] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2229.708864] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2229.708997] env[62476]: DEBUG nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2229.709199] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2230.028410] env[62476]: DEBUG nova.network.neutron [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2230.042877] env[62476]: INFO nova.compute.manager [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Took 0.33 seconds to deallocate network for instance. 
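Annotation: at this point the build of instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce has failed in the now-familiar way: CopyVirtualDisk_Task rejects the image-cache copy with InvalidArgument on fileType, spawn() propagates the VimFaultException, and the compute manager unwinds by aborting the resource claim, deallocating the (empty) network info, and re-scheduling the build. A simplified sketch of that unwind path is below; driver, claim and network_api are hypothetical stand-ins for Nova's real collaborators, so this illustrates the control flow visible in the log rather than Nova's actual code.

    class RescheduledException(Exception):
        """Build failed on this host; hand the request back to the scheduler."""

    def build_and_run_instance(driver, claim, network_api, context, instance):
        try:
            driver.spawn(context, instance)      # dies in _cache_sparse_image
        except Exception as exc:                 # here: VimFaultException
            claim.abort()                        # "Aborting claim" above
            network_api.deallocate_for_instance(context, instance)
            # logged above as: "Build of instance ... was re-scheduled:
            # A specified parameter was not correct: fileType"
            raise RescheduledException(str(exc)) from exc

The re-schedule also explains the long lock hold released just below: _locked_do_build_and_run_instance kept the per-instance lock for the entire failed build (501.629s), so the terminate request that runs next had been queued behind it for 305.863s.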
[ 2230.163966] env[62476]: INFO nova.scheduler.client.report [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Deleted allocations for instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce [ 2230.192853] env[62476]: DEBUG oslo_concurrency.lockutils [None req-666d01ed-6b69-43de-a073-caf48d74bc15 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 501.629s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.193154] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 415.744s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.193348] env[62476]: INFO nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] During sync_power_state the instance has a pending task (spawning). Skip. [ 2230.193528] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.194066] env[62476]: DEBUG oslo_concurrency.lockutils [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 305.863s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.194291] env[62476]: DEBUG oslo_concurrency.lockutils [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.194497] env[62476]: DEBUG oslo_concurrency.lockutils [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.194663] env[62476]: DEBUG oslo_concurrency.lockutils [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.197542] env[62476]: INFO nova.compute.manager [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Terminating instance [ 2230.199356] env[62476]: DEBUG nova.compute.manager [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2230.199552] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2230.199816] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98eea579-b2c0-4951-a85a-f0db97aac871 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.210398] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68f61b8-bf3b-4549-a7d8-9b2b7c043254 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.240566] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cc61313f-d7db-4c5d-bb8e-1e516d2a89ce could not be found. [ 2230.240824] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2230.240962] env[62476]: INFO nova.compute.manager [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2230.241252] env[62476]: DEBUG oslo.service.loopingcall [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2230.241530] env[62476]: DEBUG nova.compute.manager [-] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2230.241629] env[62476]: DEBUG nova.network.neutron [-] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2230.267477] env[62476]: DEBUG nova.network.neutron [-] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2230.275519] env[62476]: INFO nova.compute.manager [-] [instance: cc61313f-d7db-4c5d-bb8e-1e516d2a89ce] Took 0.03 seconds to deallocate network for instance. [ 2230.397897] env[62476]: DEBUG oslo_concurrency.lockutils [None req-48d84c0f-64b3-4799-84b0-2d9296144e10 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Lock "cc61313f-d7db-4c5d-bb8e-1e516d2a89ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.204s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.028058] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2267.040976] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.041232] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.041403] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.041562] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2267.042762] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed7f4e3-b297-4cf6-bd42-38a05ff39e66 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.052032] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f68518f-0e22-4736-8d95-902ae4fbc5b3 {{(pid=62476) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.066960] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cc76f4-ce72-4250-8ad2-cb4f51cd8ac7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.073883] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b248b562-d8e0-4a6f-9556-5c717091dbb4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.103360] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180722MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2267.103599] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.103908] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.170914] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.171098] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.171230] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.171352] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.171475] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.171612] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 56a5da15-57da-4d4d-a359-d90b780f67e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.171797] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2267.171992] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2267.172148] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=100GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2267.263474] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c6f104-f6ad-4036-8e41-8bf9bbed0ae9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.272650] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2172c3-b2d2-4adb-ac8f-474ffdfd884f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.303347] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ef5441-6bb3-4ec1-83ab-9b1d74e27dee {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.311148] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75218af-50b6-4167-917d-b6f2afc6e51b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.324522] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.332870] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 
0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2267.346326] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2267.346503] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.243s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.346482] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2270.027467] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.027616] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.027990] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2272.027682] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.028051] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2272.028051] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2272.047435] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2272.047604] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2272.047743] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2272.047900] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2272.048086] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 11af6076-e985-477c-98a6-437843b26b02] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2272.048224] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2272.048352] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2272.048476] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2273.043532] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.657230] env[62476]: WARNING oslo_vmware.rw_handles [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2275.657230] env[62476]: ERROR oslo_vmware.rw_handles [ 2275.658155] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2275.659721] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2275.659984] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Copying Virtual Disk [datastore1] vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/5a85b324-629a-4a70-82b6-6dd159e6812b/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2275.660293] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-099c0f07-3ded-41d3-8b0f-6d3f65395ab6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.668313] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){ [ 2275.668313] env[62476]: value = "task-4319217" [ 2275.668313] env[62476]: _type = "Task" [ 2275.668313] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.676901] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': task-4319217, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.026996] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2276.180991] env[62476]: DEBUG oslo_vmware.exceptions [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Fault InvalidArgument not matched. {{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2276.180991] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2276.182110] env[62476]: ERROR nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2276.182110] env[62476]: Faults: ['InvalidArgument'] [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Traceback (most recent call last): [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] yield resources [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self.driver.spawn(context, instance, image_meta, [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2276.182110] 
env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self._fetch_image_if_missing(context, vi) [ 2276.182110] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] image_cache(vi, tmp_image_ds_loc) [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] vm_util.copy_virtual_disk( [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] session._wait_for_task(vmdk_copy_task) [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] return self.wait_for_task(task_ref) [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] return evt.wait() [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] result = hub.switch() [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2276.182564] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] return self.greenlet.switch() [ 2276.182955] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2276.182955] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self.f(*self.args, **self.kw) [ 2276.182955] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2276.182955] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] raise exceptions.translate_fault(task_info.error) [ 2276.182955] env[62476]: ERROR nova.compute.manager [instance: 
9497c622-7f14-4fc2-ac24-d611897a8be9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2276.182955] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Faults: ['InvalidArgument'] [ 2276.182955] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] [ 2276.182955] env[62476]: INFO nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Terminating instance [ 2276.183384] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2276.183596] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2276.183849] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23758c60-a45f-425d-b107-b96a689190db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.186827] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2276.186827] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2276.187162] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9c462c-0454-4618-947e-794cb2a084e9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.194831] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2276.195085] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe1dd9a7-419c-40ee-b0ea-7b68e50c1277 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.197443] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2276.198065] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2276.198723] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfe1e015-296b-423f-a0df-67017cae5402 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.204453] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Waiting for the task: (returnval){ [ 2276.204453] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]521da8af-4b4d-4b2f-c555-dda2e3ed46d4" [ 2276.204453] env[62476]: _type = "Task" [ 2276.204453] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.213944] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]521da8af-4b4d-4b2f-c555-dda2e3ed46d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.275602] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2276.275830] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2276.276031] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Deleting the datastore file [datastore1] 9497c622-7f14-4fc2-ac24-d611897a8be9 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2276.276326] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b912cb79-8cbb-48ab-b151-96e13e882bfa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.284410] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){ [ 2276.284410] env[62476]: value = "task-4319219" [ 2276.284410] env[62476]: _type = "Task" [ 2276.284410] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.293266] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': task-4319219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.714982] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2276.715369] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Creating directory with path [datastore1] vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2276.715532] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-154226c6-0262-41b4-a503-e1306f281a31 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.726587] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Created directory with path [datastore1] vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2276.726813] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Fetch image to [datastore1] vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2276.726994] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2276.727887] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd9ee08-0cad-4bee-9ff5-765592a1bfc4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.734833] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b4c6ca-b81f-4215-b53c-6d1946e35221 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.744559] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2134a6c-c407-47e9-a678-f01a689e0018 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.774962] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e76998-c3ac-4599-8533-1a9fd6f54dbc {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.781542] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3a03475a-f199-40ac-96b2-14dad2f6c76a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.792691] env[62476]: DEBUG oslo_vmware.api [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': task-4319219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064077} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.792947] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2276.793246] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2276.793451] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2276.793643] env[62476]: INFO nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Took 0.61 seconds to destroy the instance on the hypervisor. 
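The teardown above is the driver's standard two-step: VirtualMachine.UnregisterVM drops the vCenter registration, then FileManager.DeleteDatastoreFile_Task removes the backing files, and Nova blocks on each returned task by polling its TaskInfo; that is what the "progress is 0%" / "completed successfully" pairs in these entries are. A minimal sketch of such a polling loop, with a hypothetical get_task_info callable and fault class standing in for the SOAP call and oslo.vmware's translated exception:

```python
import time

class TaskFaultError(Exception):
    """Hypothetical stand-in for oslo_vmware.exceptions.VimFaultException."""

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it finishes.

    get_task_info() is assumed to return a dict shaped like TaskInfo:
    {'state': 'queued'|'running'|'success'|'error',
     'progress': int, 'error': str}.
    """
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info  # e.g. DeleteDatastoreFile_Task completing in 0.064s above
        if info['state'] == 'error':
            # The error branch is how a fault like "A specified parameter
            # was not correct: fileType" surfaces to the caller.
            raise TaskFaultError(info.get('error', 'unknown fault'))
        # Still queued/running: report progress and poll again.
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)
```

In the real service this loop runs on a green thread via a looping call, which is why the traceback below passes through eventlet and loopingcall frames before reaching _poll_task.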
[ 2276.796147] env[62476]: DEBUG nova.compute.claims [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2276.796323] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2276.796541] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2276.805434] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2276.860139] env[62476]: DEBUG oslo_vmware.rw_handles [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2276.919609] env[62476]: DEBUG oslo_vmware.rw_handles [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2276.919826] env[62476]: DEBUG oslo_vmware.rw_handles [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2277.004553] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19657f0-2a69-490a-aca4-8873eb0f1c30 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.012339] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1da229-0330-4b00-b814-7f414b488357 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.043096] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2277.044524] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da3c8e7-131f-4d87-ab84-1390f412b8ae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.052664] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b657879-36c4-4f41-81d6-b93c8389cd66 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.066753] env[62476]: DEBUG nova.compute.provider_tree [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2277.075481] env[62476]: DEBUG nova.scheduler.client.report [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2277.090019] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.090590] env[62476]: ERROR nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2277.090590] env[62476]: Faults: ['InvalidArgument'] [ 2277.090590] 
env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Traceback (most recent call last): [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self.driver.spawn(context, instance, image_meta, [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self._fetch_image_if_missing(context, vi) [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] image_cache(vi, tmp_image_ds_loc) [ 2277.090590] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] vm_util.copy_virtual_disk( [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] session._wait_for_task(vmdk_copy_task) [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] return self.wait_for_task(task_ref) [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] return evt.wait() [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] result = hub.switch() [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] return self.greenlet.switch() [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2277.090902] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] self.f(*self.args, **self.kw) [ 2277.091217] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2277.091217] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] raise exceptions.translate_fault(task_info.error) [ 2277.091217] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2277.091217] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Faults: ['InvalidArgument'] [ 2277.091217] env[62476]: ERROR nova.compute.manager [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] [ 2277.091352] env[62476]: DEBUG nova.compute.utils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2277.092805] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Build of instance 9497c622-7f14-4fc2-ac24-d611897a8be9 was re-scheduled: A specified parameter was not correct: fileType [ 2277.092805] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2277.093197] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2277.093372] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2277.093543] env[62476]: DEBUG nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2277.093707] env[62476]: DEBUG nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2277.382150] env[62476]: DEBUG nova.network.neutron [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2277.399339] env[62476]: INFO nova.compute.manager [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Took 0.31 seconds to deallocate network for instance. [ 2277.506311] env[62476]: INFO nova.scheduler.client.report [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Deleted allocations for instance 9497c622-7f14-4fc2-ac24-d611897a8be9 [ 2277.535044] env[62476]: DEBUG oslo_concurrency.lockutils [None req-3ec41f10-0e0b-4954-9c6a-63184f8a4550 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "9497c622-7f14-4fc2-ac24-d611897a8be9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 537.958s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.535044] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "9497c622-7f14-4fc2-ac24-d611897a8be9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 145.225s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.535184] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "9497c622-7f14-4fc2-ac24-d611897a8be9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2277.535274] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock
"9497c622-7f14-4fc2-ac24-d611897a8be9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.535504] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "9497c622-7f14-4fc2-ac24-d611897a8be9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.540988] env[62476]: INFO nova.compute.manager [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Terminating instance [ 2277.543862] env[62476]: DEBUG nova.compute.manager [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2277.543862] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2277.544058] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22f77a8c-49bb-491c-b7ab-80032e04f3e6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.558825] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fa83df-ed65-451d-92cb-9b8765b54401 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.589117] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9497c622-7f14-4fc2-ac24-d611897a8be9 could not be found. [ 2277.589401] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2277.589585] env[62476]: INFO nova.compute.manager [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 2277.589841] env[62476]: DEBUG oslo.service.loopingcall [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2277.590091] env[62476]: DEBUG nova.compute.manager [-] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2277.590193] env[62476]: DEBUG nova.network.neutron [-] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2277.618212] env[62476]: DEBUG nova.network.neutron [-] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2277.627096] env[62476]: INFO nova.compute.manager [-] [instance: 9497c622-7f14-4fc2-ac24-d611897a8be9] Took 0.04 seconds to deallocate network for instance. [ 2277.745405] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b759db9c-cd4a-4982-bf7a-eabb48ead7e1 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "9497c622-7f14-4fc2-ac24-d611897a8be9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.210s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2280.027960] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2280.453022] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquiring lock "653f1f54-0432-4138-a577-259fbaa16cc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2280.453320] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "653f1f54-0432-4138-a577-259fbaa16cc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2280.465942] env[62476]: DEBUG nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Starting instance...
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2280.519173] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2280.519244] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2280.521020] env[62476]: INFO nova.compute.claims [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2280.713637] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f574213-1ce1-468f-96bc-addb587a3b97 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.721911] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9260a7e-bc4c-4940-b3ff-fc27d0009c37 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.754130] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8ead8f-20b3-4408-ba0e-bc34e440e4a1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.762283] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30555792-185f-4897-8264-1b32055f394b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.776419] env[62476]: DEBUG nova.compute.provider_tree [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2280.786592] env[62476]: DEBUG nova.scheduler.client.report [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2280.802188] env[62476]: DEBUG 
oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2280.802568] env[62476]: DEBUG nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2280.845378] env[62476]: DEBUG nova.compute.utils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2280.847722] env[62476]: DEBUG nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2280.848037] env[62476]: DEBUG nova.network.neutron [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2280.858780] env[62476]: DEBUG nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Start building block device mappings for instance. 
{{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2280.892308] env[62476]: INFO nova.virt.block_device [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Booting with volume e6704a3c-b270-4e73-8ffe-180e2c5cc720 at /dev/sda [ 2280.924816] env[62476]: DEBUG nova.policy [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee1a4776c4414166a8df6441631a52b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a86afbb6d38144dca6306277edb29ba0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 2280.941072] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5db37851-2895-45c4-92e3-5ddc78962852 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.950487] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20a76bb-4763-48ca-8f36-7a86a3c951d8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.979462] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-562c63ea-0a26-4ab7-9320-c4ef0c617cc2 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.989395] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66a4520-eb8d-440a-a422-ea30da0b0f2e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.019491] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2add367f-aa7f-4cdf-b02f-eb482c9de17a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.027193] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4c42b6-e7e9-4d44-a329-07d1ec3848d1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.042382] env[62476]: DEBUG nova.virt.block_device [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updating existing volume attachment record: ba31184d-5adc-4b76-b5a0-1717c96e8d4c {{(pid=62476) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 2281.284086] env[62476]: DEBUG nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
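This build is volume-backed: nothing is fetched from the image cache, the root disk is volume e6704a3c-b270-4e73-8ffe-180e2c5cc720 mounted at /dev/sda, and Nova updates the existing Cinder attachment record ba31184d-5adc-4b76-b5a0-1717c96e8d4c rather than creating a new one. A trimmed sketch of what such a root block-device mapping carries, using only fields that appear in this log (the plain-dict shape is illustrative, not Nova's object model):

```python
# Illustrative only: field names mirror the BDM printed later in this log.
root_bdm = {
    "boot_index": 0,                 # marks this mapping as the root disk
    "mount_device": "/dev/sda",
    "delete_on_termination": True,
    "attachment_id": "ba31184d-5adc-4b76-b5a0-1717c96e8d4c",
    "connection_info": {
        "driver_volume_type": "vmdk",
        "data": {
            "volume_id": "e6704a3c-b270-4e73-8ffe-180e2c5cc720",
            "access_mode": "rw",
        },
    },
}

def is_volume_backed(block_device_mapping):
    """An instance boots from a volume when some mapping claims boot_index 0."""
    return any(bdm.get("boot_index") == 0 for bdm in block_device_mapping)

print(is_volume_backed([root_bdm]))  # True
```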
[ 2281.284637] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2281.284849] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2281.285058] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2281.285268] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2281.285454] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2281.285620] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2281.285831] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2281.286147] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2281.286227] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905
tempest-ServerActionsV293TestJSON-1649936905-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2281.286331] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2281.286655] env[62476]: DEBUG nova.virt.hardware [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2281.288133] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efdc55a-fb0f-4bf5-bb37-89d76fa0eea8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.298399] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e92e74b-bf03-4c4c-b4cf-2a8bd2c89585 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.568996] env[62476]: DEBUG nova.network.neutron [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Successfully created port: c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2282.208524] env[62476]: DEBUG nova.compute.manager [req-3316ef73-c9a5-4808-b4ed-72d551e5a67c req-fd68a0f1-ec73-4793-85c8-772291dc3ab4 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Received event network-vif-plugged-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2282.208825] env[62476]: DEBUG oslo_concurrency.lockutils [req-3316ef73-c9a5-4808-b4ed-72d551e5a67c req-fd68a0f1-ec73-4793-85c8-772291dc3ab4 service nova] Acquiring lock "653f1f54-0432-4138-a577-259fbaa16cc5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.209028] env[62476]: DEBUG oslo_concurrency.lockutils [req-3316ef73-c9a5-4808-b4ed-72d551e5a67c req-fd68a0f1-ec73-4793-85c8-772291dc3ab4 service nova] Lock "653f1f54-0432-4138-a577-259fbaa16cc5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2282.209207] env[62476]: DEBUG oslo_concurrency.lockutils [req-3316ef73-c9a5-4808-b4ed-72d551e5a67c req-fd68a0f1-ec73-4793-85c8-772291dc3ab4 service nova] Lock "653f1f54-0432-4138-a577-259fbaa16cc5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
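The pop_instance_event locking just above is Nova's external-event rendezvous: when Neutron reports network-vif-plugged, the handler pops whatever waiter the build thread registered for that event. Here the build has not started waiting yet, so the dispatch falls through to the "unexpected event" warning in the next entry. A toy version of the registry, using plain threading rather than Nova's eventlet machinery (class and function names are illustrative):

```python
import threading

class InstanceEvents:
    """Toy per-instance event table; not Nova's implementation."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the "<uuid>-events" lock role
        self._events = {}               # (instance, event_name) -> Event

    def prepare_for_event(self, instance, name):
        # A waiter registers interest *before* triggering the change.
        with self._lock:
            ev = threading.Event()
            self._events[(instance, name)] = ev
            return ev

    def pop_event(self, instance, name):
        # The external-event handler pops the waiter, if any.
        with self._lock:
            return self._events.pop((instance, name), None)

def dispatch(events, instance, name):
    ev = events.pop_event(instance, name)
    if ev is None:
        # No waiting events found: matches the WARNING that follows.
        print("Received unexpected event %s for instance %s" % (name, instance))
    else:
        ev.set()  # wake the thread blocked on this event
```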
[ 2282.209380] env[62476]: DEBUG nova.compute.manager [req-3316ef73-c9a5-4808-b4ed-72d551e5a67c req-fd68a0f1-ec73-4793-85c8-772291dc3ab4 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] No waiting events found dispatching network-vif-plugged-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2282.209544] env[62476]: WARNING nova.compute.manager [req-3316ef73-c9a5-4808-b4ed-72d551e5a67c req-fd68a0f1-ec73-4793-85c8-772291dc3ab4 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Received unexpected event network-vif-plugged-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 for instance with vm_state building and task_state spawning. [ 2282.333617] env[62476]: DEBUG nova.network.neutron [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Successfully updated port: c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2282.353651] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquiring lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2282.353915] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquired lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.354103] env[62476]: DEBUG nova.network.neutron [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2282.416447] env[62476]: DEBUG nova.network.neutron [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance cache missing network info.
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2282.606966] env[62476]: DEBUG nova.network.neutron [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updating instance_info_cache with network_info: [{"id": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "address": "fa:16:3e:4b:d8:81", "network": {"id": "6e557c04-9a70-4fb4-a6dc-c0f17e3f52d7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-701737079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a86afbb6d38144dca6306277edb29ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1de03a6-b5", "ovs_interfaceid": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2282.624139] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Releasing lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2282.624503] env[62476]: DEBUG nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance network_info: |[{"id": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "address": "fa:16:3e:4b:d8:81", "network": {"id": "6e557c04-9a70-4fb4-a6dc-c0f17e3f52d7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-701737079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a86afbb6d38144dca6306277edb29ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1de03a6-b5", "ovs_interfaceid": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2282.625267] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:d8:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c2daf7c-c01b-41b1-a09a-fb8b893b4c80', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1de03a6-b51f-4cc8-ab89-977de4ab8ad3', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2282.633430] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Creating folder: Project (a86afbb6d38144dca6306277edb29ba0). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2282.634473] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df684617-cf93-41af-b232-487c6bac4db4 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.649760] env[62476]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2282.649967] env[62476]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62476) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2282.650398] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Folder already exists: Project (a86afbb6d38144dca6306277edb29ba0). Parent ref: group-v849485. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2282.650623] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Creating folder: Instances. Parent ref: group-v849586. {{(pid=62476) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2282.650896] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a19d932c-d267-4d09-ae6b-d6f2a3df0a6f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.663317] env[62476]: INFO nova.virt.vmwareapi.vm_util [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Created folder: Instances in parent group-v849586. [ 2282.663627] env[62476]: DEBUG oslo.service.loopingcall [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2282.663841] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2282.664470] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-004f1174-1a09-451d-b7a8-e48cce88b387 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.686251] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2282.686251] env[62476]: value = "task-4319223" [ 2282.686251] env[62476]: _type = "Task" [ 2282.686251] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.694518] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319223, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.196852] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319223, 'name': CreateVM_Task, 'duration_secs': 0.314433} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.197099] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2283.197873] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'boot_index': 0, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-849589', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'name': 'volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '653f1f54-0432-4138-a577-259fbaa16cc5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'serial': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720'}, 'attachment_id': 'ba31184d-5adc-4b76-b5a0-1717c96e8d4c', 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=62476) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2283.198210] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Root volume attach. 
Driver type: vmdk {{(pid=62476) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2283.199069] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2d5a2e-d045-4191-8e5a-30b2282db773 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.208235] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1cd11d-abae-40bc-97fe-a5c5551080a3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.215837] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6fb0b9-2535-4c5c-87ae-efefd668d459 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.224336] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-52632474-1d6d-46fd-812f-2349ee26a85f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.233160] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2283.233160] env[62476]: value = "task-4319224" [ 2283.233160] env[62476]: _type = "Task" [ 2283.233160] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.242220] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.749027] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task} progress is 40%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.240392] env[62476]: DEBUG nova.compute.manager [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Received event network-changed-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2284.240772] env[62476]: DEBUG nova.compute.manager [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Refreshing instance network info cache due to event network-changed-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3. 
{{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2284.240817] env[62476]: DEBUG oslo_concurrency.lockutils [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] Acquiring lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.240930] env[62476]: DEBUG oslo_concurrency.lockutils [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] Acquired lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.241102] env[62476]: DEBUG nova.network.neutron [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Refreshing network info cache for port c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2284.251354] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task} progress is 54%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.619490] env[62476]: DEBUG nova.network.neutron [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updated VIF entry in instance network info cache for port c1de03a6-b51f-4cc8-ab89-977de4ab8ad3. {{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2284.619993] env[62476]: DEBUG nova.network.neutron [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updating instance_info_cache with network_info: [{"id": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "address": "fa:16:3e:4b:d8:81", "network": {"id": "6e557c04-9a70-4fb4-a6dc-c0f17e3f52d7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-701737079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a86afbb6d38144dca6306277edb29ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1de03a6-b5", "ovs_interfaceid": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.630717] env[62476]: DEBUG oslo_concurrency.lockutils [req-1ac7dc6e-f66a-4ee4-9a61-719d0e6da537 req-9faf7bcf-9c11-4f24-8103-3793f949d6ca service nova] Releasing 
lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2284.748885] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task} progress is 67%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.023293] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2285.250024] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task} progress is 82%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.748126] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task} progress is 97%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.247752] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task} progress is 98%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.749773] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319224, 'name': RelocateVM_Task, 'duration_secs': 3.10393} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.750072] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Volume attach. 
Driver type: vmdk {{(pid=62476) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2286.750275] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-849589', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'name': 'volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '653f1f54-0432-4138-a577-259fbaa16cc5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'serial': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720'} {{(pid=62476) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2286.751064] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7602a1c-0499-48c9-bc04-86534d2c7b26 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.767957] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67f5003-11a2-4d18-9531-6fd0aed2da29 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.791842] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720/volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720.vmdk or device None with type thin {{(pid=62476) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2286.792224] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ecc955b-5172-4bad-a60a-53406bf8cc5c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.811877] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2286.811877] env[62476]: value = "task-4319225" [ 2286.811877] env[62476]: _type = "Task" [ 2286.811877] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.819841] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319225, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.322153] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319225, 'name': ReconfigVM_Task, 'duration_secs': 0.265758} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.322467] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Reconfigured VM instance instance-0000005c to attach disk [datastore1] volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720/volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720.vmdk or device None with type thin {{(pid=62476) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2287.327633] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb99e6f3-be12-44cd-b6c5-3d63b1f11afa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.342611] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2287.342611] env[62476]: value = "task-4319226" [ 2287.342611] env[62476]: _type = "Task" [ 2287.342611] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.352020] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.852946] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319226, 'name': ReconfigVM_Task, 'duration_secs': 0.112132} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.853377] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-849589', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'name': 'volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '653f1f54-0432-4138-a577-259fbaa16cc5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'serial': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720'} {{(pid=62476) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2287.853933] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c89670ae-b2f8-4486-9bf5-de341bfb881a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.861192] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2287.861192] env[62476]: value = "task-4319227" [ 2287.861192] env[62476]: _type = "Task" [ 2287.861192] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.869753] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319227, 'name': Rename_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.370740] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319227, 'name': Rename_Task, 'duration_secs': 0.130326} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.371035] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Powering on the VM {{(pid=62476) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 2288.371300] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-174fea91-c051-4bd2-8138-882bb10439a3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.378378] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2288.378378] env[62476]: value = "task-4319228" [ 2288.378378] env[62476]: _type = "Task" [ 2288.378378] env[62476]: } to complete. 
{{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.388490] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319228, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.888952] env[62476]: DEBUG oslo_vmware.api [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319228, 'name': PowerOnVM_Task, 'duration_secs': 0.43194} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.889465] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Powered on the VM {{(pid=62476) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 2288.889465] env[62476]: INFO nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Took 7.61 seconds to spawn the instance on the hypervisor. [ 2288.889716] env[62476]: DEBUG nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Checking state {{(pid=62476) _get_power_state /opt/stack/nova/nova/compute/manager.py:1783}} [ 2288.890567] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5facdd18-f9e6-45f0-a6a3-89b5502f1360 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.950702] env[62476]: INFO nova.compute.manager [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Took 8.45 seconds to build instance. 
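The spawn sequence recorded above (CreateVM_Task, RelocateVM_Task for the boot volume, two ReconfigVM_Task calls, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware's session layer, which turns each vCenter *_Task handle into the "Waiting for the task ... progress is N%" poll loop seen throughout this log. A minimal sketch of that pattern using the public oslo_vmware.api/vim_util calls; the vCenter host, credentials, retry count, and the first-VM lookup are illustrative placeholders, not values from this run:

    # Sketch of the polling pattern behind the "progress is N%" records above.
    # Host/credentials below are placeholders, not values from this log.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10,        # retry transient SOAP faults
        task_poll_interval=0.5)    # matches the ~0.5 s poll cadence in the log

    # Look up a VM via the PropertyCollector (simplified to the first result).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj

    # Invoking a *_Task method returns immediately with a task reference;
    # wait_for_task() polls it (the "progress is N%" lines) and raises on a
    # task error, otherwise returns the final TaskInfo.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)  # 'success' once the task completes
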
[ 2288.972846] env[62476]: DEBUG oslo_concurrency.lockutils [None req-230a981c-e12a-4255-91b8-2420010839a5 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "653f1f54-0432-4138-a577-259fbaa16cc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.519s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.456299] env[62476]: DEBUG nova.compute.manager [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Received event network-changed-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2290.456832] env[62476]: DEBUG nova.compute.manager [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Refreshing instance network info cache due to event network-changed-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2290.456832] env[62476]: DEBUG oslo_concurrency.lockutils [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] Acquiring lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2290.457076] env[62476]: DEBUG oslo_concurrency.lockutils [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] Acquired lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2290.457076] env[62476]: DEBUG nova.network.neutron [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Refreshing network info cache for port c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2290.849210] env[62476]: DEBUG nova.network.neutron [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updated VIF entry in instance network info cache for port c1de03a6-b51f-4cc8-ab89-977de4ab8ad3. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2290.849915] env[62476]: DEBUG nova.network.neutron [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updating instance_info_cache with network_info: [{"id": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "address": "fa:16:3e:4b:d8:81", "network": {"id": "6e557c04-9a70-4fb4-a6dc-c0f17e3f52d7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-701737079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a86afbb6d38144dca6306277edb29ba0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c2daf7c-c01b-41b1-a09a-fb8b893b4c80", "external-id": "nsx-vlan-transportzone-89", "segmentation_id": 89, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1de03a6-b5", "ovs_interfaceid": "c1de03a6-b51f-4cc8-ab89-977de4ab8ad3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2290.865995] env[62476]: DEBUG oslo_concurrency.lockutils [req-9e22a271-9c00-4d87-9781-4ee856ca371d req-ae6cf111-5258-4c64-965e-9e082b7d1363 service nova] Releasing lock "refresh_cache-653f1f54-0432-4138-a577-259fbaa16cc5" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2301.092173] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2301.092506] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2301.104016] env[62476]: DEBUG nova.compute.manager [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Starting instance... 
{{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2301.154214] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2301.154480] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2301.156334] env[62476]: INFO nova.compute.claims [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2301.318361] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c05b5c-4750-46aa-85dd-15b10cd5f728 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.326309] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a41422-c901-4eb3-b100-51cd3936db94 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.357112] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c4b24c-543c-4eae-9ea2-0a2ae9413245 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.364630] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca2590d-b4a1-4c0f-aa97-489cfd61ce3c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.377700] env[62476]: DEBUG nova.compute.provider_tree [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2301.387366] env[62476]: DEBUG nova.scheduler.client.report [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2301.400996] env[62476]: DEBUG oslo_concurrency.lockutils [None 
req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.246s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2301.401472] env[62476]: DEBUG nova.compute.manager [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2301.437984] env[62476]: DEBUG nova.compute.utils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2301.439576] env[62476]: DEBUG nova.compute.manager [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2301.439758] env[62476]: DEBUG nova.network.neutron [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2301.449213] env[62476]: DEBUG nova.compute.manager [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2301.524904] env[62476]: DEBUG nova.compute.manager [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Start spawning the instance on the hypervisor. 
{{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2301.538561] env[62476]: DEBUG nova.policy [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3a117f106402424280e477babc21990c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f16c7f1cb3ec41ffbdd622e3ee5992ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}} [ 2301.553378] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2301.553634] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2301.553792] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2301.553975] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2301.554141] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2301.554293] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2301.554532] env[62476]: DEBUG nova.virt.hardware [None 
req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2301.554728] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2301.554976] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2301.555247] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2301.555493] env[62476]: DEBUG nova.virt.hardware [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2301.556521] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e176cd-fcd7-4254-8069-19b11694f5de {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.565623] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6e2774-b57a-4203-9d0e-cee71487692c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.960440] env[62476]: DEBUG nova.network.neutron [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Successfully created port: d2e54212-9451-44d9-b89a-59cbad710e47 {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2302.526122] env[62476]: DEBUG nova.compute.manager [req-e5895a32-614f-4277-99d5-53db3c2d6587 req-64a1fb16-0005-458f-bdce-bad71a79875b service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Received event network-vif-plugged-d2e54212-9451-44d9-b89a-59cbad710e47 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2302.526122] env[62476]: DEBUG oslo_concurrency.lockutils [req-e5895a32-614f-4277-99d5-53db3c2d6587 req-64a1fb16-0005-458f-bdce-bad71a79875b service nova] Acquiring lock "3efb9939-d7ad-42dc-b3bc-472a2e34f7a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2302.526122] env[62476]: DEBUG oslo_concurrency.lockutils [req-e5895a32-614f-4277-99d5-53db3c2d6587 req-64a1fb16-0005-458f-bdce-bad71a79875b service nova] Lock 
"3efb9939-d7ad-42dc-b3bc-472a2e34f7a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2302.526122] env[62476]: DEBUG oslo_concurrency.lockutils [req-e5895a32-614f-4277-99d5-53db3c2d6587 req-64a1fb16-0005-458f-bdce-bad71a79875b service nova] Lock "3efb9939-d7ad-42dc-b3bc-472a2e34f7a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.526723] env[62476]: DEBUG nova.compute.manager [req-e5895a32-614f-4277-99d5-53db3c2d6587 req-64a1fb16-0005-458f-bdce-bad71a79875b service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] No waiting events found dispatching network-vif-plugged-d2e54212-9451-44d9-b89a-59cbad710e47 {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2302.527045] env[62476]: WARNING nova.compute.manager [req-e5895a32-614f-4277-99d5-53db3c2d6587 req-64a1fb16-0005-458f-bdce-bad71a79875b service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Received unexpected event network-vif-plugged-d2e54212-9451-44d9-b89a-59cbad710e47 for instance with vm_state building and task_state spawning. [ 2302.617265] env[62476]: DEBUG nova.network.neutron [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Successfully updated port: d2e54212-9451-44d9-b89a-59cbad710e47 {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2302.629758] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "refresh_cache-3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2302.629758] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "refresh_cache-3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2302.629758] env[62476]: DEBUG nova.network.neutron [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2302.684614] env[62476]: DEBUG nova.network.neutron [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2303.161305] env[62476]: DEBUG nova.network.neutron [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Updating instance_info_cache with network_info: [{"id": "d2e54212-9451-44d9-b89a-59cbad710e47", "address": "fa:16:3e:3f:f9:f9", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e54212-94", "ovs_interfaceid": "d2e54212-9451-44d9-b89a-59cbad710e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2303.175646] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "refresh_cache-3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2303.175907] env[62476]: DEBUG nova.compute.manager [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Instance network_info: |[{"id": "d2e54212-9451-44d9-b89a-59cbad710e47", "address": "fa:16:3e:3f:f9:f9", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e54212-94", "ovs_interfaceid": "d2e54212-9451-44d9-b89a-59cbad710e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2303.176375] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:f9:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3734b156-0f7d-4721-b23c-d000412ec2eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2e54212-9451-44d9-b89a-59cbad710e47', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2303.184018] env[62476]: DEBUG oslo.service.loopingcall [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2303.184528] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2303.184765] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e9cde0e-ec02-4a52-b62d-92f16396fafc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.208171] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2303.208171] env[62476]: value = "task-4319229" [ 2303.208171] env[62476]: _type = "Task" [ 2303.208171] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.217875] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319229, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.718353] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319229, 'name': CreateVM_Task, 'duration_secs': 0.292167} completed successfully. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2303.718668] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Created VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2303.719167] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2303.719336] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2303.719644] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2303.719895] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-129f234b-9852-4471-9886-8468255028db {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.725223] env[62476]: DEBUG oslo_vmware.api [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Waiting for the task: (returnval){ [ 2303.725223] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52c25d9a-80e8-0b55-aaa6-21ad692c5a8d" [ 2303.725223] env[62476]: _type = "Task" [ 2303.725223] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.733779] env[62476]: DEBUG oslo_vmware.api [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52c25d9a-80e8-0b55-aaa6-21ad692c5a8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.236651] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2304.236890] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Processing image 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2304.237129] env[62476]: DEBUG oslo_concurrency.lockutils [None req-d8e10f4c-d1df-44b2-8425-753532cbac83 tempest-DeleteServersTestJSON-487069151 tempest-DeleteServersTestJSON-487069151-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2304.555994] env[62476]: DEBUG nova.compute.manager [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Received event network-changed-d2e54212-9451-44d9-b89a-59cbad710e47 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2304.556213] env[62476]: DEBUG nova.compute.manager [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Refreshing instance network info cache due to event network-changed-d2e54212-9451-44d9-b89a-59cbad710e47. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2304.556425] env[62476]: DEBUG oslo_concurrency.lockutils [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] Acquiring lock "refresh_cache-3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2304.556570] env[62476]: DEBUG oslo_concurrency.lockutils [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] Acquired lock "refresh_cache-3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2304.556732] env[62476]: DEBUG nova.network.neutron [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Refreshing network info cache for port d2e54212-9451-44d9-b89a-59cbad710e47 {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2304.845252] env[62476]: DEBUG nova.network.neutron [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Updated VIF entry in instance network info cache for port d2e54212-9451-44d9-b89a-59cbad710e47. 
{{(pid=62476) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2304.845611] env[62476]: DEBUG nova.network.neutron [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Updating instance_info_cache with network_info: [{"id": "d2e54212-9451-44d9-b89a-59cbad710e47", "address": "fa:16:3e:3f:f9:f9", "network": {"id": "416604e0-e2da-4194-9af3-62a02367c616", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1633447643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f16c7f1cb3ec41ffbdd622e3ee5992ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3734b156-0f7d-4721-b23c-d000412ec2eb", "external-id": "nsx-vlan-transportzone-560", "segmentation_id": 560, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e54212-94", "ovs_interfaceid": "d2e54212-9451-44d9-b89a-59cbad710e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2304.855119] env[62476]: DEBUG oslo_concurrency.lockutils [req-1c63e15e-c532-4570-94fa-c6378a2e4f2b req-01acf07b-7df6-44bb-9a49-33113ec511a9 service nova] Releasing lock "refresh_cache-3efb9939-d7ad-42dc-b3bc-472a2e34f7a1" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2306.412491] env[62476]: INFO nova.compute.manager [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Rebuilding instance [ 2306.453260] env[62476]: DEBUG nova.compute.manager [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Checking state {{(pid=62476) _get_power_state /opt/stack/nova/nova/compute/manager.py:1783}} [ 2306.454169] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb05302-afd3-4394-8eb1-16b3eca900d5 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.495679] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Powering off the VM {{(pid=62476) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 2306.496261] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94192b4f-f20d-4dd5-85d5-198c10fd6b37 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2306.504893] env[62476]: DEBUG oslo_vmware.api [None 
req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2306.504893] env[62476]: value = "task-4319230" [ 2306.504893] env[62476]: _type = "Task" [ 2306.504893] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2306.514923] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.016868] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319230, 'name': PowerOffVM_Task, 'duration_secs': 0.207549} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2307.017154] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Powered off the VM {{(pid=62476) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 2307.017898] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Powering off the VM {{(pid=62476) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 2307.018166] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-204e1676-1760-442f-abb7-40a08635c854 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.025457] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2307.025457] env[62476]: value = "task-4319231" [ 2307.025457] env[62476]: _type = "Task" [ 2307.025457] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.033396] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319231, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.536630] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] VM already powered off {{(pid=62476) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 2307.536888] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Volume detach. Driver type: vmdk {{(pid=62476) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2307.537107] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-849589', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'name': 'volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '653f1f54-0432-4138-a577-259fbaa16cc5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'serial': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720'} {{(pid=62476) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2307.537842] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3652cd7-7e97-45ba-b0c3-02bdcf87af6b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.556375] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a46e086-ec7d-4b79-94d4-a026461c7982 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.564045] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89cbce90-0064-4c96-80c9-5bcfbe02014e {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.583039] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fda58b6-3ef4-4a16-a07a-a84061fb0600 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.598324] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] The volume has not been displaced from its original location: [datastore1] volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720/volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720.vmdk. No consolidation needed. 
{{(pid=62476) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2307.603714] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=62476) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2307.604058] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21a43bfd-9af3-4c63-b7df-406dd18b3775 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.622735] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2307.622735] env[62476]: value = "task-4319232" [ 2307.622735] env[62476]: _type = "Task" [ 2307.622735] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.631431] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319232, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.134148] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319232, 'name': ReconfigVM_Task, 'duration_secs': 0.208626} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.134338] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=62476) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2308.138973] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ef61c59-1cdf-4ad4-a36a-5611bef7a96c {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.154918] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2308.154918] env[62476]: value = "task-4319233" [ 2308.154918] env[62476]: _type = "Task" [ 2308.154918] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2308.163603] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319233, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.664763] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319233, 'name': ReconfigVM_Task, 'duration_secs': 0.114527} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.665079] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-849589', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'name': 'volume-e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '653f1f54-0432-4138-a577-259fbaa16cc5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720', 'serial': 'e6704a3c-b270-4e73-8ffe-180e2c5cc720'} {{(pid=62476) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2308.665358] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2308.666172] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bc42de-c047-4870-80a1-5add833d1b2a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.672885] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2308.673141] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a48504e8-dd3b-4efd-a8d7-7ddb19283eb0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.732645] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2308.732895] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2308.733108] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 
tempest-ServerActionsV293TestJSON-1649936905-project-member] Deleting the datastore file [datastore1] 653f1f54-0432-4138-a577-259fbaa16cc5 {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2308.733430] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c4ec13d-5464-4133-8be4-4c18ca67ebad {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.740419] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for the task: (returnval){ [ 2308.740419] env[62476]: value = "task-4319235" [ 2308.740419] env[62476]: _type = "Task" [ 2308.740419] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2308.750279] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.254473] env[62476]: DEBUG oslo_vmware.api [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Task: {'id': task-4319235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08444} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.254473] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2309.254473] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2309.254473] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2309.313226] env[62476]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Volume detach. 
Driver type: vmdk {{(pid=62476) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2309.313600] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32dbde43-51a0-419e-bed8-6104948e3217 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.322709] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67000580-e6d3-46b6-8c8d-68099f1ed520 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.352434] env[62476]: ERROR nova.compute.manager [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Failed to detach volume e6704a3c-b270-4e73-8ffe-180e2c5cc720 from /dev/sda: nova.exception.InstanceNotFound: Instance 653f1f54-0432-4138-a577-259fbaa16cc5 could not be found. [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Traceback (most recent call last): [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 4134, in _do_rebuild_instance [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self.driver.rebuild(**kwargs) [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/driver.py", line 390, in rebuild [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] raise NotImplementedError() [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] NotImplementedError [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] During handling of the above exception, another exception occurred: [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Traceback (most recent call last): [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3557, in _detach_root_volume [ 2309.352434] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self.driver.detach_volume(context, old_connection_info, [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] return self._volumeops.detach_volume(connection_info, instance) [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self._detach_volume_vmdk(connection_info, instance) [ 2309.353124] env[62476]: ERROR nova.compute.manager 
[instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] stable_ref.fetch_moref(session) [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] nova.exception.InstanceNotFound: Instance 653f1f54-0432-4138-a577-259fbaa16cc5 could not be found. [ 2309.353124] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.503021] env[62476]: DEBUG nova.compute.utils [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Build of instance 653f1f54-0432-4138-a577-259fbaa16cc5 aborted: Failed to rebuild volume backed instance. {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2309.505516] env[62476]: ERROR nova.compute.manager [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 653f1f54-0432-4138-a577-259fbaa16cc5 aborted: Failed to rebuild volume backed instance. 
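The traceback that follows spells out the chain summarized above: the VMware driver inherits the base driver's rebuild(), which raises NotImplementedError; the compute manager then falls back to its default rebuild path, which must first detach the root volume of a volume-backed instance; that detach fails with InstanceNotFound because the backing VM was already unregistered, and the rebuild is aborted. A minimal, illustrative Python sketch of that control flow (simplified names, not Nova's actual code):

    class InstanceNotFound(Exception): pass
    class BuildAbortException(Exception): pass

    class VMwareLikeDriver:
        def rebuild(self, **kwargs):
            # The base virt driver does not implement rebuild(); the manager
            # catches this and uses its default implementation instead.
            raise NotImplementedError()

        def detach_volume(self, connection_info, instance_uuid):
            # Stands in for the failed moref lookup seen in this log
            # (the search by instance UUID found no VM on the backend).
            raise InstanceNotFound(instance_uuid)

    def do_rebuild_instance(driver, instance_uuid, volume_backed, **kwargs):
        try:
            driver.rebuild(**kwargs)
        except NotImplementedError:
            if volume_backed:
                try:
                    # Default impl: detach the root volume before rebuilding.
                    driver.detach_volume(None, instance_uuid)
                except InstanceNotFound as exc:
                    raise BuildAbortException(
                        "Build of instance %s aborted: Failed to rebuild "
                        "volume backed instance." % instance_uuid) from exc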
[ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Traceback (most recent call last): [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 4134, in _do_rebuild_instance [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self.driver.rebuild(**kwargs) [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/driver.py", line 390, in rebuild [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] raise NotImplementedError() [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] NotImplementedError [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] During handling of the above exception, another exception occurred: [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Traceback (most recent call last): [ 2309.505516] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3592, in _rebuild_volume_backed_instance [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self._detach_root_volume(context, instance, root_bdm) [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3571, in _detach_root_volume [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] with excutils.save_and_reraise_exception(): [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self.force_reraise() [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] raise self.value [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3557, in _detach_root_volume [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self.driver.detach_volume(context, old_connection_info, [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] return self._volumeops.detach_volume(connection_info, instance) [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2309.505884] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self._detach_volume_vmdk(connection_info, instance) [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] stable_ref.fetch_moref(session) [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] nova.exception.InstanceNotFound: Instance 653f1f54-0432-4138-a577-259fbaa16cc5 could not be found. [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] During handling of the above exception, another exception occurred: [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Traceback (most recent call last): [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 10888, in _error_out_instance_on_exception [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] yield [ 2309.506253] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3860, in rebuild_instance [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self._do_rebuild_instance_with_claim( [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3946, in _do_rebuild_instance_with_claim [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self._do_rebuild_instance( [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 4138, in _do_rebuild_instance [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] self._rebuild_default_impl(**kwargs) [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3715, in _rebuild_default_impl [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] 
self._rebuild_volume_backed_instance( [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] File "/opt/stack/nova/nova/compute/manager.py", line 3607, in _rebuild_volume_backed_instance [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] raise exception.BuildAbortException( [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] nova.exception.BuildAbortException: Build of instance 653f1f54-0432-4138-a577-259fbaa16cc5 aborted: Failed to rebuild volume backed instance. [ 2309.506606] env[62476]: ERROR nova.compute.manager [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] [ 2309.599529] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.599787] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.700554] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0849c0c9-d4d1-4f3a-9af8-d1a4dcb6dc82 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.709082] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee054651-0dc6-424a-8c20-a1428e112273 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.740542] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092d7cda-e984-4438-9cf6-29f8efbde1d3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.748724] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a56a2fc-f477-4251-ae28-c4aaf2863c79 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.762834] env[62476]: DEBUG nova.compute.provider_tree [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2309.771596] env[62476]: DEBUG nova.scheduler.client.report [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2309.796806] env[62476]: DEBUG oslo_concurrency.lockutils [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.197s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2309.797050] env[62476]: INFO nova.compute.manager [None req-dd98b074-e911-4d01-854c-6e490667c308 tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Successfully reverted task state from rebuilding on failure for instance. [ 2310.182625] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquiring lock "653f1f54-0432-4138-a577-259fbaa16cc5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.182888] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "653f1f54-0432-4138-a577-259fbaa16cc5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.183153] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquiring lock "653f1f54-0432-4138-a577-259fbaa16cc5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.183295] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "653f1f54-0432-4138-a577-259fbaa16cc5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.183460] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "653f1f54-0432-4138-a577-259fbaa16cc5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.185460] env[62476]: INFO nova.compute.manager [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905
tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Terminating instance [ 2310.187481] env[62476]: DEBUG nova.compute.manager [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2310.187881] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-271c75df-32b7-41c8-955c-abe9ed5f1874 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.197456] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf781d1-c146-4c9a-9bae-54d3fa1f755a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.225597] env[62476]: WARNING nova.virt.vmwareapi.driver [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 653f1f54-0432-4138-a577-259fbaa16cc5 could not be found. [ 2310.225824] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2310.226152] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d432a9d-dc25-4c73-bda0-2c9aa98044fb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.234248] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3850b210-d662-417e-a67f-feb11f1940bc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.261672] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 653f1f54-0432-4138-a577-259fbaa16cc5 could not be found. [ 2310.261879] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2310.262069] env[62476]: INFO nova.compute.manager [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Took 0.07 seconds to destroy the instance on the hypervisor.
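The two "Instance does not exist" warnings above show the destroy path treating a VM that is already gone on the backend as successfully destroyed, so that network deallocation and volume cleanup still run. A minimal sketch of that idempotent pattern (the helper callables are illustrative, not Nova's actual API):

    class InstanceNotFound(Exception): pass

    def destroy_on_hypervisor(find_vm_by_uuid, unregister_vm, instance_uuid):
        # Idempotent destroy: a missing backend VM counts as already destroyed,
        # and the caller proceeds to network/volume cleanup either way.
        try:
            vm_ref = find_vm_by_uuid(instance_uuid)  # e.g. a SearchIndex.FindAllByUuid lookup
        except InstanceNotFound:
            return
        unregister_vm(vm_ref)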
[ 2310.262331] env[62476]: DEBUG oslo.service.loopingcall [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2310.262557] env[62476]: DEBUG nova.compute.manager [-] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2310.262651] env[62476]: DEBUG nova.network.neutron [-] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2310.858884] env[62476]: DEBUG nova.network.neutron [-] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2310.875827] env[62476]: DEBUG nova.compute.manager [req-5a72f70f-93fc-414d-9834-7abdb65a3d6c req-df0f7421-69c9-49b1-8faf-3f2d38e1726c service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Received event network-vif-deleted-c1de03a6-b51f-4cc8-ab89-977de4ab8ad3 {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2310.876130] env[62476]: INFO nova.compute.manager [req-5a72f70f-93fc-414d-9834-7abdb65a3d6c req-df0f7421-69c9-49b1-8faf-3f2d38e1726c service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Neutron deleted interface c1de03a6-b51f-4cc8-ab89-977de4ab8ad3; detaching it from the instance and deleting it from the info cache [ 2310.876353] env[62476]: DEBUG nova.network.neutron [req-5a72f70f-93fc-414d-9834-7abdb65a3d6c req-df0f7421-69c9-49b1-8faf-3f2d38e1726c service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2310.879506] env[62476]: INFO nova.compute.manager [-] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Took 0.62 seconds to deallocate network for instance. [ 2310.887221] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7e75e51-4f37-420d-85ec-a41d1c6e359a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.896889] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8e6f68-8089-40ac-9f10-39bc1e7956dd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.925562] env[62476]: DEBUG nova.compute.manager [req-5a72f70f-93fc-414d-9834-7abdb65a3d6c req-df0f7421-69c9-49b1-8faf-3f2d38e1726c service nova] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Detach interface failed, port_id=c1de03a6-b51f-4cc8-ab89-977de4ab8ad3, reason: Instance 653f1f54-0432-4138-a577-259fbaa16cc5 could not be found.
{{(pid=62476) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10988}} [ 2310.992112] env[62476]: INFO nova.compute.manager [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Took 0.11 seconds to detach 1 volumes for instance. [ 2310.996675] env[62476]: DEBUG nova.compute.manager [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Deleting volume: e6704a3c-b270-4e73-8ffe-180e2c5cc720 {{(pid=62476) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3239}} [ 2311.089423] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.089689] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.089995] env[62476]: DEBUG nova.objects.instance [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lazy-loading 'resources' on Instance uuid 653f1f54-0432-4138-a577-259fbaa16cc5 {{(pid=62476) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2311.227748] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35088605-cb82-4afa-b8a9-bc8b4f4eb179 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.236523] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc70358-ac07-4c8f-a66d-a869a6d15b13 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.269934] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4615c060-2af8-4893-96ca-27a22120cf2b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.278422] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dde0c6-c97f-4725-a583-3b517fce0901 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.293747] env[62476]: DEBUG nova.compute.provider_tree [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2311.303213] env[62476]: DEBUG nova.scheduler.client.report 
[None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2311.335564] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.246s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.398679] env[62476]: DEBUG oslo_concurrency.lockutils [None req-b969d356-2218-40a9-adef-58eca183cdab tempest-ServerActionsV293TestJSON-1649936905 tempest-ServerActionsV293TestJSON-1649936905-project-member] Lock "653f1f54-0432-4138-a577-259fbaa16cc5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.216s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2318.229516] env[62476]: DEBUG oslo_concurrency.lockutils [None req-fca7a2d8-ea30-4f44-8047-db6464da2602 tempest-ServersTestJSON-1286035361 tempest-ServersTestJSON-1286035361-project-member] Acquiring lock "63657e6f-8e2e-41e0-ad6e-2a13a90bf7de" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2325.591238] env[62476]: WARNING oslo_vmware.rw_handles [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles response.begin() [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2325.591238] env[62476]: ERROR oslo_vmware.rw_handles [ 
2325.591973] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2325.593960] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2325.594326] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Copying Virtual Disk [datastore1] vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/97a2753d-3396-4ecb-a961-36cd03f0e197/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2325.594721] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3042c9bc-1b51-47d3-acc3-15684247d76a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.604465] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Waiting for the task: (returnval){ [ 2325.604465] env[62476]: value = "task-4319237" [ 2325.604465] env[62476]: _type = "Task" [ 2325.604465] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.613091] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Task: {'id': task-4319237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.115137] env[62476]: DEBUG oslo_vmware.exceptions [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2326.115402] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2326.115979] env[62476]: ERROR nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2326.115979] env[62476]: Faults: ['InvalidArgument'] [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Traceback (most recent call last): [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] yield resources [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self.driver.spawn(context, instance, image_meta, [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self._fetch_image_if_missing(context, vi) [ 2326.115979] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] image_cache(vi, tmp_image_ds_loc) [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] vm_util.copy_virtual_disk( [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] session._wait_for_task(vmdk_copy_task) [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] return self.wait_for_task(task_ref) [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] return evt.wait() [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] result = hub.switch() [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2326.116457] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] return self.greenlet.switch() [ 2326.116811] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2326.116811] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self.f(*self.args, **self.kw) [ 2326.116811] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2326.116811] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] raise exceptions.translate_fault(task_info.error) [ 2326.116811] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2326.116811] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Faults: ['InvalidArgument'] [ 2326.116811] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] [ 2326.116811] env[62476]: INFO nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Terminating instance [ 2326.117920] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2326.118186] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2326.118441] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81a9635e-52d8-4dfc-a151-7e7682e7493d {{(pid=62476) request_handler 
[ 2326.117920] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2326.118186] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2326.118441] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81a9635e-52d8-4dfc-a151-7e7682e7493d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.122115] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2326.122283] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquired lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2326.122457] env[62476]: DEBUG nova.network.neutron [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2326.134716] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2326.134903] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2326.135920] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31a25d4c-6181-414d-91ac-76d516d599ae {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.141713] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){
[ 2326.141713] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52fa73e1-b14e-856f-447c-5e0bdd1cc08d"
[ 2326.141713] env[62476]: _type = "Task"
[ 2326.141713] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2326.150538] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52fa73e1-b14e-856f-447c-5e0bdd1cc08d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2326.172333] env[62476]: DEBUG nova.network.neutron [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance cache missing network info.
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2326.255175] env[62476]: DEBUG nova.network.neutron [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2326.264069] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Releasing lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2326.264504] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2326.264699] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2326.265864] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078a5f64-4b11-4ec2-93ee-d9aa8c9ae6ba {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.275757] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2326.276012] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5de711b-f293-4301-90a5-2668d4afd531 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.311268] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2326.311552] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2326.311743] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Deleting the datastore file [datastore1] 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2326.312031] env[62476]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-621c2a85-b7da-4b4c-84f2-7313e5a05637 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.318536] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Waiting for the task: (returnval){
[ 2326.318536] env[62476]: value = "task-4319239"
[ 2326.318536] env[62476]: _type = "Task"
[ 2326.318536] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2326.326725] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Task: {'id': task-4319239, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2326.652345] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2326.652685] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating directory with path [datastore1] vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2326.652854] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1102d4e-df13-4efc-bccd-4c7a2eeac971 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.666906] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Created directory with path [datastore1] vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2326.667112] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Fetch image to [datastore1] vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2326.667285] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2326.668114] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee372fc-ac63-49fa-8ca2-5a0c430b0596 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.675081] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286a98f4-213d-41e9-b308-eea5efb36961 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.684529] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797ee6c6-62c3-4948-9350-7d9d82e40a29 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.714910] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1084a0-df83-4042-9942-3ad8f6566830 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.721017] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f55df57a-7c20-4456-b588-8b1ee7073647 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2326.740599] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2326.792106] env[62476]: DEBUG oslo_vmware.rw_handles [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2326.852264] env[62476]: DEBUG oslo_vmware.rw_handles [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2326.852460] env[62476]: DEBUG oslo_vmware.rw_handles [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
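The SessionManager.AcquireGenericServiceTicket call followed by the rw_handles records shows how the image bytes actually reach the datastore: a one-time service ticket authorizes a direct HTTPS PUT of the stream to the ESX host's /folder endpoint, with the URL and size taken from the log above. A loose sketch of the transfer; oslo.vmware's FileWriteHandle does the real work, and the cookie name and the use of requests here are assumptions, not Nova's implementation:

```python
# Hedged sketch of the datastore upload logged above. 'ticket_id' stands in
# for the AcquireGenericServiceTicket result; the cookie name is an assumption.
import requests

def upload_to_datastore(ticket_id, chunks, size):
    url = ('https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/'
           'vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/'
           '3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk'
           '?dcPath=ha-datacenter&dsName=datastore1')
    resp = requests.put(url,
                        data=chunks,  # any iterator of byte chunks; 21318656 bytes here
                        headers={'Content-Length': str(size)},
                        cookies={'vmware_cgi_ticket': ticket_id},
                        verify=False)
    resp.raise_for_status()
```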
[ 2326.855842] env[62476]: DEBUG oslo_vmware.api [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Task: {'id': task-4319239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.060238} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2326.856140] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2326.856334] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2326.856507] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2326.856708] env[62476]: INFO nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Took 0.59 seconds to destroy the instance on the hypervisor.
[ 2326.856963] env[62476]: DEBUG oslo.service.loopingcall [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2326.857361] env[62476]: DEBUG nova.compute.manager [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network deallocation for instance since networking was not requested.
{{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}}
[ 2326.859581] env[62476]: DEBUG nova.compute.claims [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2326.859777] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2326.860034] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2327.004531] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79470e0-a6c2-4bc2-af1f-27ec28e2c22f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2327.013731] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc98a911-1d85-4d0b-9268-5800cf57f716 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2327.042681] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2327.044759] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e16628f-7076-4e6c-aeaa-7edaf9a7eb4b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2327.052296] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e50d3dc-5092-4efa-9e7d-07ce0feccb82 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2327.057322] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2327.067503] env[62476]: DEBUG nova.compute.provider_tree [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2327.075716] env[62476]: DEBUG nova.scheduler.client.report [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
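For each resource class in the inventory record above, Placement capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. The later "Final resource view" record is consistent with these numbers: 512 MB reserved plus six 128 MB instances gives the reported 1280 MB used_ram. A quick check:

```python
# Worked numbers from the inventory above: capacity = (total - reserved) * ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 200, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 97},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity, 'max per allocation:', inv['max_unit'])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 200.0
```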
[ 2327.090897] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.231s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2327.091433] env[62476]: ERROR nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2327.091433] env[62476]: Faults: ['InvalidArgument']
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Traceback (most recent call last):
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self.driver.spawn(context, instance, image_meta,
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self._fetch_image_if_missing(context, vi)
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] image_cache(vi, tmp_image_ds_loc)
[ 2327.091433] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] vm_util.copy_virtual_disk(
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] session._wait_for_task(vmdk_copy_task)
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] return self.wait_for_task(task_ref)
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] return evt.wait()
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] result = hub.switch()
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] return self.greenlet.switch()
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2327.091737] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] self.f(*self.args, **self.kw)
[ 2327.092077] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2327.092077] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] raise exceptions.translate_fault(task_info.error)
[ 2327.092077] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2327.092077] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Faults: ['InvalidArgument']
[ 2327.092077] env[62476]: ERROR nova.compute.manager [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc]
[ 2327.092273] env[62476]: DEBUG nova.compute.utils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2327.093157] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.036s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2327.093342] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2327.093497] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None]
Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2327.094162] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Build of instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc was re-scheduled: A specified parameter was not correct: fileType [ 2327.094162] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2327.094537] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2327.094784] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.094934] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquired lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.095109] env[62476]: DEBUG nova.network.neutron [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2327.096508] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6966dd40-114c-4129-a4a6-5177bca480d1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.107197] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0c3aad-7d70-4382-a113-92a586dbef38 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.122421] env[62476]: DEBUG nova.network.neutron [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2327.124597] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8059673-9109-4232-ac6c-20670ad45313 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.131999] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237d18c6-6ec3-403c-8a9f-1723b329ec67 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.164613] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180681MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2327.164769] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2327.164946] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2327.213352] env[62476]: DEBUG nova.network.neutron [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.221259] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2327.223323] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Releasing lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2327.223536] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2327.223713] env[62476]: DEBUG nova.compute.manager [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Skipping network deallocation for instance since networking was not requested. {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2327.233030] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2327.233030] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2327.233030] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2327.233030] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 56a5da15-57da-4d4d-a359-d90b780f67e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2327.233249] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2327.233249] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2327.233249] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2327.233249] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=100GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2327.332727] env[62476]: INFO nova.scheduler.client.report [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Deleted allocations for instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc [ 2327.339704] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a55c3d-eb7e-474c-972e-a11d9259e0c6 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.348455] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc260a13-20cb-4a27-908e-852a621223fd {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.380477] env[62476]: DEBUG oslo_concurrency.lockutils [None req-5c9d190a-2c52-4696-a082-769aa430e299 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 555.672s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2327.381218] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aae5512-b59a-4baa-99b6-ecc69e5dccd8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.383806] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 359.211s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2327.384049] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2327.384324] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2327.384507] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2327.386410] env[62476]: INFO nova.compute.manager [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Terminating instance [ 2327.388239] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquiring lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.388426] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Acquired lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.388611] env[62476]: DEBUG nova.network.neutron [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2327.394542] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c53686c-d6bb-42ca-ab04-047763d1376d {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.410936] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2327.419275] env[62476]: DEBUG nova.network.neutron [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance cache missing network info. 
{{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2327.422196] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2327.437286] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2327.437472] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.273s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2327.493716] env[62476]: DEBUG nova.network.neutron [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.502828] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Releasing lock "refresh_cache-2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2327.503254] env[62476]: DEBUG nova.compute.manager [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Start destroying the instance on the hypervisor. 
{{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2327.503446] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2327.503939] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6c3e873-9179-4fa2-8481-e2635a4e7462 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.513503] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4c0802-3929-43e6-a3f9-84e5d12016e3 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.540650] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc could not be found. [ 2327.540848] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2327.541019] env[62476]: INFO nova.compute.manager [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2327.541289] env[62476]: DEBUG oslo.service.loopingcall [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2327.541502] env[62476]: DEBUG nova.compute.manager [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2327.541603] env[62476]: DEBUG nova.network.neutron [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2327.558781] env[62476]: DEBUG nova.network.neutron [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2327.567160] env[62476]: DEBUG nova.network.neutron [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2327.575276] env[62476]: INFO nova.compute.manager [-] [instance: 2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc] Took 0.03 seconds to deallocate network for instance. 
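The "Waiting for function ..._deallocate_network_with_retries to return" records in this terminate sequence come from oslo.service's looping-call helper, which Nova uses so that transient Neutron failures during deallocation are retried rather than fatal. A hedged sketch of the mechanism (the deallocation body is a stub; names are illustrative):

```python
# Sketch of the oslo.service looping-call pattern behind the
# "_deallocate_network_with_retries" lines above.
from oslo_service import loopingcall

def _deallocate_with_retries():
    try:
        pass  # call Neutron to deallocate; an exception here means "try again"
    except Exception:
        return  # the loop runs the function again after the interval
    raise loopingcall.LoopingCallDone()  # success: stop the loop

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=2).wait()  # blocks until LoopingCallDone is raised
```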
[ 2327.677008] env[62476]: DEBUG oslo_concurrency.lockutils [None req-779d48b2-9712-4546-bbac-7d732976c125 tempest-ServerShowV254Test-740195172 tempest-ServerShowV254Test-740195172-project-member] Lock "2825c1dd-8f7c-4ee8-8bfa-5e221057f4fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.293s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2328.422456] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.026736] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2332.027561] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2332.027917] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2332.027917] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2332.043785] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2332.043946] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2332.044089] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 11af6076-e985-477c-98a6-437843b26b02] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2332.044223] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2332.044349] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Skipping network cache update for instance because it is Building. 
{{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 2332.044474] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 2332.044597] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}}
[ 2332.045115] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2332.045312] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
[ 2334.041479] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2337.027122] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2339.028151] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2340.027604] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
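The periodic entries above (_heal_instance_info_cache, _poll_rebooting_instances, and so on) are produced by methods on the compute manager decorated with oslo.service's periodic_task; a PeriodicTasks-based manager collects them and runs each on its configured spacing. A minimal sketch (class name and spacing are illustrative):

```python
# How the "Running periodic task ComputeManager.<name>" lines come about:
# decorated methods are registered by the PeriodicTasks base class and invoked
# on their spacing by the service's periodic timer.
from oslo_service import periodic_task

class ComputeManagerSketch(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=10)
    def _poll_rebooting_instances(self, context):
        pass  # each invocation logs a "Running periodic task ..." DEBUG record
```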
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2373.314382] env[62476]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2373.314382] env[62476]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2373.314382] env[62476]: ERROR oslo_vmware.rw_handles [ 2373.315289] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Downloaded image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2373.317105] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Caching image {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2373.317429] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Copying Virtual Disk [datastore1] vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk to [datastore1] vmware_temp/d3c12ab5-1615-4c74-b8f9-3d8afe381210/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk {{(pid=62476) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2373.317872] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d2166b0-ecc1-4a61-a580-e050e00eb0bb {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.327717] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){ [ 2373.327717] env[62476]: value = "task-4319240" [ 2373.327717] env[62476]: _type = "Task" [ 2373.327717] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.336768] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': task-4319240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.837713] env[62476]: DEBUG oslo_vmware.exceptions [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Fault InvalidArgument not matched. 
{{(pid=62476) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2373.838026] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2373.838622] env[62476]: ERROR nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2373.838622] env[62476]: Faults: ['InvalidArgument'] [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Traceback (most recent call last): [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] yield resources [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self.driver.spawn(context, instance, image_meta, [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self._fetch_image_if_missing(context, vi) [ 2373.838622] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] image_cache(vi, tmp_image_ds_loc) [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] vm_util.copy_virtual_disk( [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] session._wait_for_task(vmdk_copy_task) [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] return self.wait_for_task(task_ref) [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] return evt.wait() [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] result = hub.switch() [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2373.838942] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] return self.greenlet.switch() [ 2373.839385] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2373.839385] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self.f(*self.args, **self.kw) [ 2373.839385] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2373.839385] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] raise exceptions.translate_fault(task_info.error) [ 2373.839385] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2373.839385] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Faults: ['InvalidArgument'] [ 2373.839385] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] [ 2373.839385] env[62476]: INFO nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Terminating instance [ 2373.840618] env[62476]: DEBUG oslo_concurrency.lockutils [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7.vmdk" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2373.840853] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2373.841154] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-156edb31-1305-4c46-a1cf-c2cb9c90a99a {{(pid=62476) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.843562] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2373.843804] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2373.844558] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18604646-336a-4d52-aa9b-3266844decf7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.851984] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Unregistering the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2373.852256] env[62476]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55938c1d-8bf7-4804-803b-321d50f6c0b7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.854743] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2373.855206] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62476) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2373.855980] env[62476]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c89467df-681c-4b9e-a665-7d78d0262586 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.861553] env[62476]: DEBUG oslo_vmware.api [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Waiting for the task: (returnval){ [ 2373.861553] env[62476]: value = "session[527a65ce-4960-617f-7fd9-0737138df61f]52d472cf-8a2f-c5cf-778e-acaeb607e322" [ 2373.861553] env[62476]: _type = "Task" [ 2373.861553] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.871168] env[62476]: DEBUG oslo_vmware.api [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Task: {'id': session[527a65ce-4960-617f-7fd9-0737138df61f]52d472cf-8a2f-c5cf-778e-acaeb607e322, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.929531] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Unregistered the VM {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2373.929794] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Deleting contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2373.929938] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Deleting the datastore file [datastore1] 3462762c-09da-473b-b2ba-4dce6c99dd8d {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2373.930268] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0277c01b-a663-4db7-b123-542553d56966 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.937086] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for the task: (returnval){ [ 2373.937086] env[62476]: value = "task-4319242" [ 2373.937086] env[62476]: _type = "Task" [ 2373.937086] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2373.946543] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': task-4319242, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2374.373073] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Preparing fetch location {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2374.373497] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating directory with path [datastore1] vmware_temp/52a5e354-4078-4eaa-ac46-1ae7cdf8bea7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2374.373555] env[62476]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fea7d2d8-9973-447f-9473-eaee6bbb366f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.387093] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Created directory with path [datastore1] vmware_temp/52a5e354-4078-4eaa-ac46-1ae7cdf8bea7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 {{(pid=62476) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2374.387296] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Fetch image to [datastore1] vmware_temp/52a5e354-4078-4eaa-ac46-1ae7cdf8bea7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk {{(pid=62476) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2374.387473] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to [datastore1] vmware_temp/52a5e354-4078-4eaa-ac46-1ae7cdf8bea7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk on the data store datastore1 {{(pid=62476) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2374.388272] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8baa641c-62f1-49a5-99b2-0b4ea4437613 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.395382] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdab4c5d-6df7-4d33-81d5-e127e41440a9 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.404713] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6979611-cf19-4668-8b65-656ab8c132dc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.436462] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f9fc86-51c5-4cdb-947b-3818bb8ca3a4 {{(pid=62476) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.448346] env[62476]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-83acb7f9-7b58-404a-a2c5-9a0f0fc4308f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.450222] env[62476]: DEBUG oslo_vmware.api [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Task: {'id': task-4319242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068216} completed successfully. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2374.450475] env[62476]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Deleted the datastore file {{(pid=62476) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2374.450648] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Deleted contents of the VM from datastore datastore1 {{(pid=62476) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2374.450863] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2374.450993] env[62476]: INFO nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Took 0.61 seconds to destroy the instance on the hypervisor. 
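
[editor's note] The two tracebacks around this point both bottom out in oslo.vmware's task polling: Nova starts a CopyVirtualDisk_Task, wait_for_task() polls it, and on failure the task error is turned into the VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) recorded above. A minimal sketch of that call pattern, assuming a hypothetical vCenter endpoint, credentials, and datastore paths (none of these values come from this log):

    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vmware_exc

    # Hypothetical endpoint and credentials -- placeholders only.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def cache_sparse_image(source_path, dest_path):
        # Same shape as the nova.virt.vmwareapi.vm_util.copy_virtual_disk
        # call in the traceback: start the task, then block on it.
        vim = session.vim
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName=source_path, destName=dest_path)
        try:
            # wait_for_task() polls the task; on error it raises the fault
            # translated by oslo_vmware.exceptions.translate_fault(), which
            # is exactly the VimFaultException seen in this log.
            session.wait_for_task(task)
        except vmware_exc.VimFaultException as err:
            print('Copy failed: %s (faults: %s)' % (err, err.fault_list))
            raise
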
[ 2374.453489] env[62476]: DEBUG nova.compute.claims [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Aborting claim: {{(pid=62476) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2374.453665] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2374.453940] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2374.476647] env[62476]: DEBUG nova.virt.vmwareapi.images [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Downloading image file data 3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7 to the data store datastore1 {{(pid=62476) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2374.534500] env[62476]: DEBUG oslo_vmware.rw_handles [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52a5e354-4078-4eaa-ac46-1ae7cdf8bea7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62476) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2374.595112] env[62476]: DEBUG oslo_vmware.rw_handles [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Completed reading data from the image iterator. {{(pid=62476) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2374.595321] env[62476]: DEBUG oslo_vmware.rw_handles [None req-15963710-661c-4823-8b2d-37674a561746 tempest-ImagesTestJSON-1877863561 tempest-ImagesTestJSON-1877863561-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52a5e354-4078-4eaa-ac46-1ae7cdf8bea7/3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62476) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2374.672342] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e0e6dc-126c-4459-a3f2-d771c189bfb7 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.680577] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c38a44-8ce8-4797-ab6d-587265624e3b {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.714440] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8a6cf9-0bd8-4b97-a9e9-3278384a4280 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.722252] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa4a47e-4a70-4d93-a2e4-7c399a175d75 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.735891] env[62476]: DEBUG nova.compute.provider_tree [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2374.745017] env[62476]: DEBUG nova.scheduler.client.report [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2374.759492] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.760103] env[62476]: ERROR nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2374.760103] env[62476]: Faults: ['InvalidArgument'] [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Traceback (most recent call last): [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self.driver.spawn(context, instance, image_meta, [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self._fetch_image_if_missing(context, vi) [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] image_cache(vi, tmp_image_ds_loc) [ 2374.760103] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] vm_util.copy_virtual_disk( [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] session._wait_for_task(vmdk_copy_task) [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] return self.wait_for_task(task_ref) [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] return evt.wait() [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] result = hub.switch() [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] return self.greenlet.switch() [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2374.760492] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] self.f(*self.args, **self.kw) [ 2374.760906] env[62476]: ERROR nova.compute.manager [instance: 
3462762c-09da-473b-b2ba-4dce6c99dd8d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2374.760906] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] raise exceptions.translate_fault(task_info.error) [ 2374.760906] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2374.760906] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Faults: ['InvalidArgument'] [ 2374.760906] env[62476]: ERROR nova.compute.manager [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] [ 2374.760906] env[62476]: DEBUG nova.compute.utils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] VimFaultException {{(pid=62476) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2374.762322] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Build of instance 3462762c-09da-473b-b2ba-4dce6c99dd8d was re-scheduled: A specified parameter was not correct: fileType [ 2374.762322] env[62476]: Faults: ['InvalidArgument'] {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2374.762719] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Unplugging VIFs for instance {{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2374.762922] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62476) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2374.763113] env[62476]: DEBUG nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2374.763280] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2375.270028] env[62476]: DEBUG nova.network.neutron [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2375.284533] env[62476]: INFO nova.compute.manager [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Took 0.52 seconds to deallocate network for instance. [ 2375.401247] env[62476]: INFO nova.scheduler.client.report [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Deleted allocations for instance 3462762c-09da-473b-b2ba-4dce6c99dd8d [ 2375.430061] env[62476]: DEBUG oslo_concurrency.lockutils [None req-76e2cbd9-04d4-4963-ac11-bf4ebb668d9e tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "3462762c-09da-473b-b2ba-4dce6c99dd8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.353s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.430633] env[62476]: DEBUG oslo_concurrency.lockutils [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "3462762c-09da-473b-b2ba-4dce6c99dd8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.313s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.430633] env[62476]: DEBUG oslo_concurrency.lockutils [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Acquiring lock "3462762c-09da-473b-b2ba-4dce6c99dd8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2375.430745] env[62476]: DEBUG oslo_concurrency.lockutils [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock 
"3462762c-09da-473b-b2ba-4dce6c99dd8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2375.430867] env[62476]: DEBUG oslo_concurrency.lockutils [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "3462762c-09da-473b-b2ba-4dce6c99dd8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2375.433492] env[62476]: INFO nova.compute.manager [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Terminating instance [ 2375.435408] env[62476]: DEBUG nova.compute.manager [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Start destroying the instance on the hypervisor. {{(pid=62476) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2375.435611] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Destroying instance {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2375.435909] env[62476]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a00adefe-6154-44af-85d3-2658b267c8f8 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.447285] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec24dac6-c816-4b15-b830-94f0dc4c09f0 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.475193] env[62476]: WARNING nova.virt.vmwareapi.vmops [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3462762c-09da-473b-b2ba-4dce6c99dd8d could not be found. [ 2375.475458] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Instance destroyed {{(pid=62476) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2375.475649] env[62476]: INFO nova.compute.manager [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2375.475897] env[62476]: DEBUG oslo.service.loopingcall [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2375.476143] env[62476]: DEBUG nova.compute.manager [-] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Deallocating network for instance {{(pid=62476) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2375.476240] env[62476]: DEBUG nova.network.neutron [-] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] deallocate_for_instance() {{(pid=62476) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2375.501446] env[62476]: DEBUG nova.network.neutron [-] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Updating instance_info_cache with network_info: [] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2375.510430] env[62476]: INFO nova.compute.manager [-] [instance: 3462762c-09da-473b-b2ba-4dce6c99dd8d] Took 0.03 seconds to deallocate network for instance. [ 2375.607095] env[62476]: DEBUG oslo_concurrency.lockutils [None req-678522f4-114e-4346-bfdc-83de56ff3870 tempest-ServerRescueNegativeTestJSON-2011157693 tempest-ServerRescueNegativeTestJSON-2011157693-project-member] Lock "3462762c-09da-473b-b2ba-4dce6c99dd8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2387.026809] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager.update_available_resource {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.038962] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.039207] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2387.039380] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2387.039552] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62476) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2387.040996] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d6a61365-a5c7-4d8a-9242-75960c769caa {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.049764] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e4b385-29c2-4c68-b1a6-f136aed36403 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.063692] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a436fa-b18c-44d7-9f9f-16875cdda034 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.070114] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d52a8c8-b67b-4c70-b7c7-d1c4f1b35ea1 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.099592] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180688MB free_disk=97GB free_vcpus=48 pci_devices=None {{(pid=62476) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2387.099750] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.099952] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2387.243049] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 8fdd45f2-0c21-461f-896e-698182bd5337 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2387.243228] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 11af6076-e985-477c-98a6-437843b26b02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2387.243362] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 56a5da15-57da-4d4d-a359-d90b780f67e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2387.243486] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2387.243607] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Instance 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=62476) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2387.243798] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2387.243941] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=100GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62476) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2387.259213] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing inventories for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2387.272414] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating ProviderTree inventory for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2387.272621] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Updating inventory in ProviderTree for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2387.283273] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing aggregate associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, aggregates: None {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 
2387.300296] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Refreshing trait associations for resource provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=62476) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2387.368907] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caec1f8a-bc6f-4e1d-b414-2db3eeb03050 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.376661] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c245a89f-3902-44a3-b7fe-4e5270190acc {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.406828] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc3cbc9-7339-4187-9941-736fe0cc2a93 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.414098] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fbc141-4035-4222-8648-207734803322 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.427397] env[62476]: DEBUG nova.compute.provider_tree [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2387.436571] env[62476]: DEBUG nova.scheduler.client.report [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2387.452204] env[62476]: DEBUG nova.compute.resource_tracker [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62476) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2387.452483] env[62476]: DEBUG oslo_concurrency.lockutils [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.352s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2390.453441] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2390.453822] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] 
Running periodic task ComputeManager._poll_volume_usage {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2392.026880] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2392.026880] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Starting heal instance info cache {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2392.027309] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Rebuilding the list of instances to heal {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2392.044129] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 8fdd45f2-0c21-461f-896e-698182bd5337] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2392.044129] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 11af6076-e985-477c-98a6-437843b26b02] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2392.044129] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 56a5da15-57da-4d4d-a359-d90b780f67e4] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2392.044637] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 63657e6f-8e2e-41e0-ad6e-2a13a90bf7de] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2392.044966] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 3efb9939-d7ad-42dc-b3bc-472a2e34f7a1] Skipping network cache update for instance because it is Building. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2392.045282] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Didn't find any instances for network info cache update. {{(pid=62476) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2392.045985] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2392.046460] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62476) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2394.027729] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.028150] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances with incomplete migration {{(pid=62476) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2396.032855] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2397.027621] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2400.027115] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2401.027427] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.027784] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.028188] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Cleaning up deleted instances {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2403.042417] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] There are 1 instances to clean {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2403.042730] env[62476]: DEBUG nova.compute.manager [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] [instance: 653f1f54-0432-4138-a577-259fbaa16cc5] Instance has had 0 of 5 cleanup attempts {{(pid=62476) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11264}} [ 2406.500689] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.500689] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Getting list of instances from cluster (obj){ [ 2406.500689] env[62476]: value = "domain-c8" [ 2406.500689] env[62476]: _type = "ClusterComputeResource" [ 2406.500689] env[62476]: } {{(pid=62476) list_instances 
[ 2406.502296] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85e59e1-38d5-45c6-8764-056d87578984 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2406.515254] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Got total of 5 instances {{(pid=62476) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 2407.055116] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2411.027579] env[62476]: DEBUG oslo_service.periodic_task [None req-c7b9afd8-29dd-4807-90e7-0df6b2bf27bd None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62476) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2413.148444] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "d024aaff-1124-426f-9d9b-e9e34981ae5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2413.148771] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Lock "d024aaff-1124-426f-9d9b-e9e34981ae5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2413.160732] env[62476]: DEBUG nova.compute.manager [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Starting instance... {{(pid=62476) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 2413.215586] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2413.215847] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2413.217687] env[62476]: INFO nova.compute.claims [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2413.346654] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a792442e-21a0-45d7-8264-946bc8f80784 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.354849] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58f53ec-8ba4-4011-a108-8a97d1f6c80a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.384512] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348ff70c-3f4a-45e8-9c98-9cbf79f98a59 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.392447] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0f39e2-1fea-4000-a2b7-7bb6559cb52f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.405915] env[62476]: DEBUG nova.compute.provider_tree [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Inventory has not changed in ProviderTree for provider: 0cae7a3c-64e3-4b86-8a81-24d587f58f11 {{(pid=62476) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2413.415972] env[62476]: DEBUG nova.scheduler.client.report [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Inventory has not changed for provider 0cae7a3c-64e3-4b86-8a81-24d587f58f11 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 200, 'reserved': 0, 'min_unit': 1, 'max_unit': 97, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62476) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 2413.429874] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.214s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
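[Editor's note] The Acquiring/acquired/"released" triplets above come from oslo.concurrency's lockutils, which logs each step of a named critical section. A sketch of the usage pattern, with the lock name taken from the log and an illustrative function body:

```python
# Sketch of the oslo.concurrency lock pattern behind the lock lines above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim():
    # Resource accounting is serialized per compute host; the "held 0.214s"
    # line reports the time spent inside this critical section.
    pass


instance_claim()

# Equivalent explicit form using the context manager:
with lockutils.lock('compute_resources'):
    pass
```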
[ 2413.430401] env[62476]: DEBUG nova.compute.manager [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Start building networks asynchronously for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 2413.462017] env[62476]: DEBUG nova.compute.utils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Using /dev/sd instead of None {{(pid=62476) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2413.463527] env[62476]: DEBUG nova.compute.manager [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Allocating IP information in the background. {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 2413.463702] env[62476]: DEBUG nova.network.neutron [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] allocate_for_instance() {{(pid=62476) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2413.473648] env[62476]: DEBUG nova.compute.manager [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Start building block device mappings for instance. {{(pid=62476) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 2413.529887] env[62476]: DEBUG nova.policy [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51aa02bb178f49779e7ae4262db0bcf1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43c3514f06db4f73bb8107310a9e8d2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62476) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2413.537842] env[62476]: DEBUG nova.compute.manager [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Start spawning the instance on the hypervisor. {{(pid=62476) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
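[Editor's note] The "Policy check for network:attach_external_network failed" line above is an oslo.policy check that returned False for a plain member token (expected; the port creation simply proceeds without the admin-only capability). A sketch of how such a check looks through oslo.policy, with the rule name and credential fields taken from the log; the 'role:admin' default is an assumption standing in for Nova's real default for this rule:

```python
# Sketch of an oslo.policy check like the one logged above.
from oslo_config import cfg
from oslo_policy import policy

cfg.CONF([])  # initialize config before building the Enforcer
enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'], 'is_admin': False,
         'project_id': '43c3514f06db4f73bb8107310a9e8d2e'}

# With do_raise=False a failed check returns False instead of raising,
# which is why the log shows a DEBUG line rather than an error.
allowed = enforcer.enforce('network:attach_external_network',
                           {}, creds, do_raise=False)
print(allowed)  # False for a plain member token
```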
[ 2413.564162] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-07-18T15:29:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-07-18T15:29:38Z,direct_url=,disk_format='vmdk',id=3d3a3f52-0fe8-4da4-8b0f-daf95adf26d7,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='5833896e0452492db476be34cc38d300',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-07-18T15:29:38Z,virtual_size=,visibility=), allow threads: False {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2413.564417] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Flavor limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2413.564578] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Image limits 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2413.564761] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Flavor pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2413.564909] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Image pref 0:0:0 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2413.565069] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62476) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2413.565283] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2413.565495] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 2413.565679] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Got 1 possible topologies {{(pid=62476) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2413.565846] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2413.566031] env[62476]: DEBUG nova.virt.hardware [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62476) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
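[Editor's note] The nova.virt.hardware run above enumerates every (sockets, cores, threads) combination whose product equals the vCPU count, subject to per-dimension caps (here effectively unlimited at 65536). A simplified, runnable sketch of that enumeration; Nova's _get_possible_cpu_topologies adds preference sorting on top:

```python
# Enumerate CPU topologies whose product equals the vCPU count, as in the
# "Build topologies for 1 vcpu(s)" / "Got 1 possible topologies" lines.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append((s, c, t))
    return topos


print(possible_topologies(1))  # [(1, 1, 1)] -- one possible topology
print(possible_topologies(4))  # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```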
[ 2413.566900] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c97f64-dc6f-4688-a313-6df44913e22a {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.575410] env[62476]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3623ff7d-812a-40d4-b47b-4ada7046356f {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2413.875161] env[62476]: DEBUG nova.network.neutron [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Successfully created port: c8032320-36f6-4955-9a18-a1eb8591d24e {{(pid=62476) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2414.491294] env[62476]: DEBUG nova.compute.manager [req-e7e867a7-6bd6-4eac-9dec-1930ec12804c req-15016889-744f-4434-96e7-1d701247f635 service nova] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Received event network-vif-plugged-c8032320-36f6-4955-9a18-a1eb8591d24e {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 2414.491664] env[62476]: DEBUG oslo_concurrency.lockutils [req-e7e867a7-6bd6-4eac-9dec-1930ec12804c req-15016889-744f-4434-96e7-1d701247f635 service nova] Acquiring lock "d024aaff-1124-426f-9d9b-e9e34981ae5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2414.491752] env[62476]: DEBUG oslo_concurrency.lockutils [req-e7e867a7-6bd6-4eac-9dec-1930ec12804c req-15016889-744f-4434-96e7-1d701247f635 service nova] Lock "d024aaff-1124-426f-9d9b-e9e34981ae5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2414.491886] env[62476]: DEBUG oslo_concurrency.lockutils [req-e7e867a7-6bd6-4eac-9dec-1930ec12804c req-15016889-744f-4434-96e7-1d701247f635 service nova] Lock "d024aaff-1124-426f-9d9b-e9e34981ae5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62476) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2414.492094] env[62476]: DEBUG nova.compute.manager [req-e7e867a7-6bd6-4eac-9dec-1930ec12804c req-15016889-744f-4434-96e7-1d701247f635 service nova] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] No waiting events found dispatching network-vif-plugged-c8032320-36f6-4955-9a18-a1eb8591d24e {{(pid=62476) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 2414.492244] env[62476]: WARNING nova.compute.manager [req-e7e867a7-6bd6-4eac-9dec-1930ec12804c req-15016889-744f-4434-96e7-1d701247f635 service nova] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Received unexpected event network-vif-plugged-c8032320-36f6-4955-9a18-a1eb8591d24e for instance with vm_state building and task_state spawning.
[ 2414.582265] env[62476]: DEBUG nova.network.neutron [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Successfully updated port: c8032320-36f6-4955-9a18-a1eb8591d24e {{(pid=62476) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2414.604344] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquiring lock "refresh_cache-d024aaff-1124-426f-9d9b-e9e34981ae5c" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2414.604631] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Acquired lock "refresh_cache-d024aaff-1124-426f-9d9b-e9e34981ae5c" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2414.604631] env[62476]: DEBUG nova.network.neutron [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Building network info cache for instance {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2414.656795] env[62476]: DEBUG nova.network.neutron [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Instance cache missing network info. {{(pid=62476) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2414.850280] env[62476]: DEBUG nova.network.neutron [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Updating instance_info_cache with network_info: [{"id": "c8032320-36f6-4955-9a18-a1eb8591d24e", "address": "fa:16:3e:1e:89:c0", "network": {"id": "f3c02f39-695a-4c59-8067-f729d5445329", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2118224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43c3514f06db4f73bb8107310a9e8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8032320-36", "ovs_interfaceid": "c8032320-36f6-4955-9a18-a1eb8591d24e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62476) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2414.864810] env[62476]: DEBUG oslo_concurrency.lockutils [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Releasing lock "refresh_cache-d024aaff-1124-426f-9d9b-e9e34981ae5c" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2414.865093] env[62476]: DEBUG nova.compute.manager [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Instance network_info: |[{"id": "c8032320-36f6-4955-9a18-a1eb8591d24e", "address": "fa:16:3e:1e:89:c0", "network": {"id": "f3c02f39-695a-4c59-8067-f729d5445329", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2118224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43c3514f06db4f73bb8107310a9e8d2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8032320-36", "ovs_interfaceid": "c8032320-36f6-4955-9a18-a1eb8591d24e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62476) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
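[Editor's note] The network_info payload cached above is a JSON list of VIF dicts; each carries the port UUID, MAC address, and nested subnet/IP data. A toy example of reading that structure, with the payload trimmed to the fields used here (shape mirrors the log entry):

```python
# Extract port UUID, MAC, and fixed IPs from a network_info-style payload.
import json

network_info = json.loads('''[{
  "id": "c8032320-36f6-4955-9a18-a1eb8591d24e",
  "address": "fa:16:3e:1e:89:c0",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
    "ips": [{"address": "192.168.128.3", "type": "fixed"}]}]}
}]''')

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips)
```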
[ 2414.865533] env[62476]: DEBUG nova.virt.vmwareapi.vmops [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:89:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8032320-36f6-4955-9a18-a1eb8591d24e', 'vif_model': 'vmxnet3'}] {{(pid=62476) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2414.873734] env[62476]: DEBUG oslo.service.loopingcall [None req-f08facb1-e3b0-49f1-ae45-de88e77c7188 tempest-ServerDiskConfigTestJSON-1480244007 tempest-ServerDiskConfigTestJSON-1480244007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62476) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2414.874243] env[62476]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Creating VM on the ESX host {{(pid=62476) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2414.874760] env[62476]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c88af915-b0d6-42a2-a671-db52bfacca65 {{(pid=62476) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2414.897648] env[62476]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2414.897648] env[62476]: value = "task-4319243"
[ 2414.897648] env[62476]: _type = "Task"
[ 2414.897648] env[62476]: } to complete. {{(pid=62476) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2414.906120] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319243, 'name': CreateVM_Task} progress is 0%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2415.410043] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319243, 'name': CreateVM_Task} progress is 25%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2415.908647] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319243, 'name': CreateVM_Task} progress is 25%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2416.410616] env[62476]: DEBUG oslo_vmware.api [-] Task: {'id': task-4319243, 'name': CreateVM_Task} progress is 25%. {{(pid=62476) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2416.525764] env[62476]: DEBUG nova.compute.manager [req-cce6c645-56e6-458c-8fdf-22754e86dd7a req-94e3cfc2-bae0-4ef6-966b-e364baa9f992 service nova] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Received event network-changed-c8032320-36f6-4955-9a18-a1eb8591d24e {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 2416.526220] env[62476]: DEBUG nova.compute.manager [req-cce6c645-56e6-458c-8fdf-22754e86dd7a req-94e3cfc2-bae0-4ef6-966b-e364baa9f992 service nova] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Refreshing instance network info cache due to event network-changed-c8032320-36f6-4955-9a18-a1eb8591d24e. {{(pid=62476) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
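[Editor's note] The repeated "progress is N%" lines above come from a poll loop over a vSphere task reference: the real code path is oslo_vmware.api.VMwareAPISession.wait_for_task(). A self-contained sketch of that polling idea; TaskInfo and get_task_info() are stubs faking the vSphere task states so the loop runs on its own:

```python
# Poll-until-done sketch behind the "Task: {...} progress is N%" lines.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    result: object = None


_states = iter([TaskInfo('running', 0), TaskInfo('running', 25),
                TaskInfo('success', 100, result='vm-123')])


def get_task_info(task_ref):
    # Stand-in for reading the task's info property via the PropertyCollector.
    return next(_states)


def wait_for_task(task_ref, poll_interval=0.5):
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(f"task {task_ref} failed")
        print(f"Task {task_ref} progress is {info.progress}%.")
        time.sleep(poll_interval)


print(wait_for_task("task-4319243"))
```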
[ 2416.526602] env[62476]: DEBUG oslo_concurrency.lockutils [req-cce6c645-56e6-458c-8fdf-22754e86dd7a req-94e3cfc2-bae0-4ef6-966b-e364baa9f992 service nova] Acquiring lock "refresh_cache-d024aaff-1124-426f-9d9b-e9e34981ae5c" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2416.526812] env[62476]: DEBUG oslo_concurrency.lockutils [req-cce6c645-56e6-458c-8fdf-22754e86dd7a req-94e3cfc2-bae0-4ef6-966b-e364baa9f992 service nova] Acquired lock "refresh_cache-d024aaff-1124-426f-9d9b-e9e34981ae5c" {{(pid=62476) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2416.527081] env[62476]: DEBUG nova.network.neutron [req-cce6c645-56e6-458c-8fdf-22754e86dd7a req-94e3cfc2-bae0-4ef6-966b-e364baa9f992 service nova] [instance: d024aaff-1124-426f-9d9b-e9e34981ae5c] Refreshing network info cache for port c8032320-36f6-4955-9a18-a1eb8591d24e {{(pid=62476) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
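[Editor's note] The external-event traffic in this trace (network-vif-plugged, network-changed) follows one pattern: Neutron notifies Nova, and the handler either wakes a thread waiting on that event or, if nothing is waiting ("No waiting events found" / "Received unexpected event"), falls back to refreshing the cached network info. A generic sketch of that dispatch, using threading.Event as a stand-in for Nova's InstanceEvents machinery:

```python
# Generic external-event dispatch sketch; not Nova's actual implementation.
import threading

waiters = {}  # (instance_uuid, event_name) -> threading.Event


def expect_event(instance_uuid, event_name):
    # Called before an operation that should block until the event arrives.
    ev = threading.Event()
    waiters[(instance_uuid, event_name)] = ev
    return ev


def external_instance_event(instance_uuid, event_name):
    ev = waiters.pop((instance_uuid, event_name), None)
    if ev is not None:
        ev.set()  # wake the thread blocked on this event
    else:
        # Nothing was waiting ("Received unexpected event ..." in the log):
        # refresh the instance's cached network info instead.
        refresh_network_info_cache(instance_uuid)


def refresh_network_info_cache(instance_uuid):
    print(f"refreshing network info cache for {instance_uuid}")


external_instance_event("d024aaff-1124-426f-9d9b-e9e34981ae5c",
                        "network-changed")
```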